diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ServerName.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ServerName.java
index c816bab..c3efcfb 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ServerName.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ServerName.java
@@ -326,7 +326,7 @@ public class ServerName implements Comparable<ServerName> {
     int prefixLen = ProtobufUtil.lengthOfPBMagic();
     try {
       RootRegionServer rss =
-        RootRegionServer.newBuilder().mergeFrom(data, prefixLen, data.length - prefixLen).build();
+        RootRegionServer.PARSER.parseFrom(data, prefixLen, data.length - prefixLen);
       org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName sn = rss.getServer();
       return new ServerName(sn.getHostName(), sn.getPort(), sn.getStartCode());
     } catch (InvalidProtocolBufferException e) {
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
index 199ce29..aa4c4ce 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
@@ -41,6 +41,7 @@ import com.google.common.collect.Lists;
 import com.google.protobuf.ByteString;
 import com.google.protobuf.InvalidProtocolBufferException;
 import com.google.protobuf.Message;
+import com.google.protobuf.Parser;
 import com.google.protobuf.RpcChannel;
 import com.google.protobuf.Service;
 import com.google.protobuf.ServiceException;
@@ -1834,17 +1835,19 @@ public final class ProtobufUtil {
   }
 
   public static ScanMetrics toScanMetrics(final byte[] bytes) {
-    MapReduceProtos.ScanMetrics.Builder builder = MapReduceProtos.ScanMetrics.newBuilder();
+    Parser<MapReduceProtos.ScanMetrics> parser = MapReduceProtos.ScanMetrics.PARSER;
+    MapReduceProtos.ScanMetrics pScanMetrics = null;
     try {
-      builder.mergeFrom(bytes);
+      pScanMetrics = parser.parseFrom(bytes);
     } catch (InvalidProtocolBufferException e) {
       //Ignored there are just no key values to add.
     }
-    MapReduceProtos.ScanMetrics pScanMetrics = builder.build();
     ScanMetrics scanMetrics = new ScanMetrics();
-    for (HBaseProtos.NameInt64Pair pair : pScanMetrics.getMetricsList()) {
-      if (pair.hasName() && pair.hasValue()) {
-        scanMetrics.setCounter(pair.getName(), pair.getValue());
+    if (pScanMetrics != null) {
+      for (HBaseProtos.NameInt64Pair pair : pScanMetrics.getMetricsList()) {
+        if (pair.hasName() && pair.hasValue()) {
+          scanMetrics.setCounter(pair.getName(), pair.getValue());
+        }
       }
     }
     return scanMetrics;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java
index 8586050..56fb92c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java
@@ -819,24 +819,34 @@ public final class RequestConverter {
     return builder.build();
   }
 
+  /**
+   * @see {@link #buildRollWALWriterRequest()}
+   */
+  private static final RollWALWriterRequest ROLL_WAL_WRITER_REQUEST =
+      RollWALWriterRequest.newBuilder().build();
+
   /**
    * Create a new RollWALWriterRequest
    *
    * @return a ReplicateWALEntryRequest
    */
   public static RollWALWriterRequest buildRollWALWriterRequest() {
-    RollWALWriterRequest.Builder builder = RollWALWriterRequest.newBuilder();
-    return builder.build();
+    return ROLL_WAL_WRITER_REQUEST;
   }
 
   /**
+   * @see {@link #buildGetServerInfoRequest()}
+   */
+  private static final GetServerInfoRequest GET_SERVER_INFO_REQUEST =
+      GetServerInfoRequest.newBuilder().build();
+
+  /**
    * Create a new GetServerInfoRequest
    *
    * @return a GetServerInfoRequest
    */
   public static GetServerInfoRequest buildGetServerInfoRequest() {
-    GetServerInfoRequest.Builder builder = GetServerInfoRequest.newBuilder();
-    return builder.build();
+    return GET_SERVER_INFO_REQUEST;
   }
 
   /**
@@ -1140,20 +1150,32 @@ public final class RequestConverter {
   }
 
   /**
+   * @see {@link #buildGetClusterStatusRequest}
+   */
+  private static final GetClusterStatusRequest GET_CLUSTER_STATUS_REQUEST =
+      GetClusterStatusRequest.newBuilder().build();
+
+  /**
    * Creates a protocol buffer GetClusterStatusRequest
    *
    * @return A GetClusterStatusRequest
    */
   public static GetClusterStatusRequest buildGetClusterStatusRequest() {
-    return GetClusterStatusRequest.newBuilder().build();
+    return GET_CLUSTER_STATUS_REQUEST;
   }
 
   /**
+   * @see {@link #buildCatalogScanRequest}
+   */
+  private static final CatalogScanRequest CATALOG_SCAN_REQUEST =
+      CatalogScanRequest.newBuilder().build();
+
+  /**
    * Creates a request for running a catalog scan
    * @return A {@link CatalogScanRequest}
    */
   public static CatalogScanRequest buildCatalogScanRequest() {
-    return CatalogScanRequest.newBuilder().build();
+    return CATALOG_SCAN_REQUEST;
   }
 
   /**
@@ -1165,11 +1187,17 @@ public final class RequestConverter {
   }
 
   /**
+   * @see {@link #buildIsCatalogJanitorEnabledRequest()}
+   */
+  private static final IsCatalogJanitorEnabledRequest IS_CATALOG_JANITOR_ENABLED_REQUEST =
+      IsCatalogJanitorEnabledRequest.newBuilder().build();
+
+  /**
    * Creates a request for querying the master whether the catalog janitor is enabled
    * @return A {@link IsCatalogJanitorEnabledRequest}
    */
   public static IsCatalogJanitorEnabledRequest buildIsCatalogJanitorEnabledRequest() {
-    return IsCatalogJanitorEnabledRequest.newBuilder().build();
+    return IS_CATALOG_JANITOR_ENABLED_REQUEST;
   }
 
   /**
@@ -1267,4 +1295,4 @@ public final class RequestConverter {
     }
     return builder.build();
   }
-}
+}
\ No newline at end of file
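The same protobuf 2.4 -> 2.5 migration pattern repeats across every file in this patch: parsing moves off the Builder round-trip (newBuilder().mergeFrom(...).build(), or the generated buildParsed()) onto the static PARSER field that protobuf 2.5 emits for each message. A minimal sketch of the before/after calling convention, with ScanMetrics standing in for any regenerated message; the wrapper class name is illustrative only:

import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.Parser;
import org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos;

public class ParserMigrationSketch {
  // protobuf 2.4 style: accumulate bytes into a Builder, then build().
  static MapReduceProtos.ScanMetrics parseOld(byte[] bytes)
      throws InvalidProtocolBufferException {
    MapReduceProtos.ScanMetrics.Builder builder = MapReduceProtos.ScanMetrics.newBuilder();
    builder.mergeFrom(bytes);
    return builder.build();
  }

  // protobuf 2.5 style: the generated static PARSER yields the immutable
  // message directly, with no intermediate Builder allocation.
  static MapReduceProtos.ScanMetrics parseNew(byte[] bytes)
      throws InvalidProtocolBufferException {
    Parser<MapReduceProtos.ScanMetrics> parser = MapReduceProtos.ScanMetrics.PARSER;
    return parser.parseFrom(bytes);
  }
}

Caller-visible semantics stay the same: Parser.parseDelimitedFrom(InputStream) returns null at clean end-of-stream, matching the old mergeDelimitedFrom path that returned false.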
--git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AccessControlProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AccessControlProtos.java index 49cccfc..7965988 100644 --- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AccessControlProtos.java +++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AccessControlProtos.java @@ -10,70 +10,243 @@ public final class AccessControlProtos { } public interface PermissionOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // repeated .Permission.Action action = 1; + /** + * repeated .Permission.Action action = 1; + */ java.util.List getActionList(); + /** + * repeated .Permission.Action action = 1; + */ int getActionCount(); + /** + * repeated .Permission.Action action = 1; + */ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Action getAction(int index); - + // optional bytes table = 2; + /** + * optional bytes table = 2; + */ boolean hasTable(); + /** + * optional bytes table = 2; + */ com.google.protobuf.ByteString getTable(); - + // optional bytes family = 3; + /** + * optional bytes family = 3; + */ boolean hasFamily(); + /** + * optional bytes family = 3; + */ com.google.protobuf.ByteString getFamily(); - + // optional bytes qualifier = 4; + /** + * optional bytes qualifier = 4; + */ boolean hasQualifier(); + /** + * optional bytes qualifier = 4; + */ com.google.protobuf.ByteString getQualifier(); } + /** + * Protobuf type {@code Permission} + */ public static final class Permission extends com.google.protobuf.GeneratedMessage implements PermissionOrBuilder { // Use Permission.newBuilder() to construct. - private Permission(Builder builder) { + private Permission(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private Permission(boolean noInit) {} - + private Permission(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final Permission defaultInstance; public static Permission getDefaultInstance() { return defaultInstance; } - + public Permission getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private Permission( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Action value = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Action.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(1, rawValue); + } else { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + action_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + 
action_.add(value); + } + break; + } + case 10: { + int length = input.readRawVarint32(); + int oldLimit = input.pushLimit(length); + while(input.getBytesUntilLimit() > 0) { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Action value = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Action.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(1, rawValue); + } else { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + action_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + action_.add(value); + } + } + input.popLimit(oldLimit); + break; + } + case 18: { + bitField0_ |= 0x00000001; + table_ = input.readBytes(); + break; + } + case 26: { + bitField0_ |= 0x00000002; + family_ = input.readBytes(); + break; + } + case 34: { + bitField0_ |= 0x00000004; + qualifier_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + action_ = java.util.Collections.unmodifiableList(action_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_Permission_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_Permission_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_Permission_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.class, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public Permission parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new Permission(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + + /** + * Protobuf enum {@code Permission.Action} + */ public enum Action implements com.google.protobuf.ProtocolMessageEnum { + /** + * READ = 0; + */ READ(0, 0), + /** + * WRITE = 1; + */ WRITE(1, 1), + /** + * EXEC = 2; + */ EXEC(2, 2), + /** + * CREATE = 3; + */ CREATE(3, 3), + /** + * ADMIN = 4; + */ ADMIN(4, 4), ; - + + /** + * READ = 0; + */ public static final int READ_VALUE = 0; + /** + * WRITE = 1; + */ public static final int WRITE_VALUE = 1; + /** + * EXEC = 2; + */ public static final int EXEC_VALUE = 2; + /** + * CREATE = 3; + */ public static final int CREATE_VALUE = 3; + /** + * ADMIN = 4; + */ public static final int ADMIN_VALUE = 4; - - + + public final int getNumber() { return value; } - + public static Action valueOf(int value) { switch (value) { case 0: return READ; @@ -84,7 +257,7 @@ public final class AccessControlProtos { default: return null; } } - + public static 
com.google.protobuf.Internal.EnumLiteMap internalGetValueMap() { return internalValueMap; @@ -96,7 +269,7 @@ public final class AccessControlProtos { return Action.valueOf(number); } }; - + public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(index); @@ -109,11 +282,9 @@ public final class AccessControlProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.getDescriptor().getEnumTypes().get(0); } - - private static final Action[] VALUES = { - READ, WRITE, EXEC, CREATE, ADMIN, - }; - + + private static final Action[] VALUES = values(); + public static Action valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { @@ -122,62 +293,89 @@ public final class AccessControlProtos { } return VALUES[desc.getIndex()]; } - + private final int index; private final int value; - + private Action(int index, int value) { this.index = index; this.value = value; } - + // @@protoc_insertion_point(enum_scope:Permission.Action) } - + private int bitField0_; // repeated .Permission.Action action = 1; public static final int ACTION_FIELD_NUMBER = 1; private java.util.List action_; + /** + * repeated .Permission.Action action = 1; + */ public java.util.List getActionList() { return action_; } + /** + * repeated .Permission.Action action = 1; + */ public int getActionCount() { return action_.size(); } + /** + * repeated .Permission.Action action = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Action getAction(int index) { return action_.get(index); } - + // optional bytes table = 2; public static final int TABLE_FIELD_NUMBER = 2; private com.google.protobuf.ByteString table_; + /** + * optional bytes table = 2; + */ public boolean hasTable() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional bytes table = 2; + */ public com.google.protobuf.ByteString getTable() { return table_; } - + // optional bytes family = 3; public static final int FAMILY_FIELD_NUMBER = 3; private com.google.protobuf.ByteString family_; + /** + * optional bytes family = 3; + */ public boolean hasFamily() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional bytes family = 3; + */ public com.google.protobuf.ByteString getFamily() { return family_; } - + // optional bytes qualifier = 4; public static final int QUALIFIER_FIELD_NUMBER = 4; private com.google.protobuf.ByteString qualifier_; + /** + * optional bytes qualifier = 4; + */ public boolean hasQualifier() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional bytes qualifier = 4; + */ public com.google.protobuf.ByteString getQualifier() { return qualifier_; } - + private void initFields() { action_ = java.util.Collections.emptyList(); table_ = com.google.protobuf.ByteString.EMPTY; @@ -188,11 +386,11 @@ public final class AccessControlProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -210,12 +408,12 @@ public final class AccessControlProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; { int dataSize = 0; 
@@ -242,14 +440,14 @@ public final class AccessControlProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -259,7 +457,7 @@ public final class AccessControlProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission) obj; - + boolean result = true; result = result && getActionList() .equals(other.getActionList()); @@ -282,9 +480,13 @@ public final class AccessControlProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (getActionCount() > 0) { @@ -304,89 +506,79 @@ public final class AccessControlProtos { hash = (53 * hash) + getQualifier().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } 
public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code Permission} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder { @@ -394,18 +586,21 @@ public final class AccessControlProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_Permission_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_Permission_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_Permission_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.class, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -416,7 +611,7 @@ public final class AccessControlProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); action_ = java.util.Collections.emptyList(); @@ -429,20 +624,20 @@ public final class AccessControlProtos { bitField0_ = (bitField0_ & ~0x00000008); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.getDescriptor(); + return 
org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_Permission_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission build() { org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission result = buildPartial(); if (!result.isInitialized()) { @@ -450,17 +645,7 @@ public final class AccessControlProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission buildPartial() { org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission(this); int from_bitField0_ = bitField0_; @@ -486,7 +671,7 @@ public final class AccessControlProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission)other); @@ -495,7 +680,7 @@ public final class AccessControlProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission other) { if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.getDefaultInstance()) return this; if (!other.action_.isEmpty()) { @@ -520,80 +705,30 @@ public final class AccessControlProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Action value = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Action.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(1, rawValue); - } else { - addAction(value); - } - break; - } - case 10: { - int length = input.readRawVarint32(); - int oldLimit = input.pushLimit(length); - while(input.getBytesUntilLimit() > 0) { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Action value = 
org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Action.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(1, rawValue); - } else { - addAction(value); - } - } - input.popLimit(oldLimit); - break; - } - case 18: { - bitField0_ |= 0x00000002; - table_ = input.readBytes(); - break; - } - case 26: { - bitField0_ |= 0x00000004; - family_ = input.readBytes(); - break; - } - case 34: { - bitField0_ |= 0x00000008; - qualifier_ = input.readBytes(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // repeated .Permission.Action action = 1; private java.util.List action_ = java.util.Collections.emptyList(); @@ -603,15 +738,27 @@ public final class AccessControlProtos { bitField0_ |= 0x00000001; } } + /** + * repeated .Permission.Action action = 1; + */ public java.util.List getActionList() { return java.util.Collections.unmodifiableList(action_); } + /** + * repeated .Permission.Action action = 1; + */ public int getActionCount() { return action_.size(); } + /** + * repeated .Permission.Action action = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Action getAction(int index) { return action_.get(index); } + /** + * repeated .Permission.Action action = 1; + */ public Builder setAction( int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Action value) { if (value == null) { @@ -622,6 +769,9 @@ public final class AccessControlProtos { onChanged(); return this; } + /** + * repeated .Permission.Action action = 1; + */ public Builder addAction(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Action value) { if (value == null) { throw new NullPointerException(); @@ -631,6 +781,9 @@ public final class AccessControlProtos { onChanged(); return this; } + /** + * repeated .Permission.Action action = 1; + */ public Builder addAllAction( java.lang.Iterable values) { ensureActionIsMutable(); @@ -638,21 +791,33 @@ public final class AccessControlProtos { onChanged(); return this; } + /** + * repeated .Permission.Action action = 1; + */ public Builder clearAction() { action_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } - + // optional bytes table = 2; private com.google.protobuf.ByteString table_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes table = 2; + */ public boolean hasTable() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional bytes table = 2; + */ public com.google.protobuf.ByteString getTable() { return table_; } + /** + * optional bytes table = 2; + */ public Builder setTable(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -662,21 +827,33 @@ public final class AccessControlProtos { onChanged(); return this; } + /** + * optional bytes table = 2; + */ public Builder clearTable() { bitField0_ = (bitField0_ & ~0x00000002); table_ = getDefaultInstance().getTable(); onChanged(); return this; } - + // optional bytes family = 3; private 
com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes family = 3; + */ public boolean hasFamily() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional bytes family = 3; + */ public com.google.protobuf.ByteString getFamily() { return family_; } + /** + * optional bytes family = 3; + */ public Builder setFamily(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -686,21 +863,33 @@ public final class AccessControlProtos { onChanged(); return this; } + /** + * optional bytes family = 3; + */ public Builder clearFamily() { bitField0_ = (bitField0_ & ~0x00000004); family_ = getDefaultInstance().getFamily(); onChanged(); return this; } - + // optional bytes qualifier = 4; private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes qualifier = 4; + */ public boolean hasQualifier() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * optional bytes qualifier = 4; + */ public com.google.protobuf.ByteString getQualifier() { return qualifier_; } + /** + * optional bytes qualifier = 4; + */ public Builder setQualifier(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -710,88 +899,201 @@ public final class AccessControlProtos { onChanged(); return this; } + /** + * optional bytes qualifier = 4; + */ public Builder clearQualifier() { bitField0_ = (bitField0_ & ~0x00000008); qualifier_ = getDefaultInstance().getQualifier(); onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:Permission) } - + static { defaultInstance = new Permission(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:Permission) } - + public interface UserPermissionOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required bytes user = 1; + /** + * required bytes user = 1; + */ boolean hasUser(); + /** + * required bytes user = 1; + */ com.google.protobuf.ByteString getUser(); - + // required .Permission permission = 2; + /** + * required .Permission permission = 2; + */ boolean hasPermission(); + /** + * required .Permission permission = 2; + */ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission getPermission(); + /** + * required .Permission permission = 2; + */ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder getPermissionOrBuilder(); } + /** + * Protobuf type {@code UserPermission} + */ public static final class UserPermission extends com.google.protobuf.GeneratedMessage implements UserPermissionOrBuilder { // Use UserPermission.newBuilder() to construct. 
- private UserPermission(Builder builder) { + private UserPermission(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private UserPermission(boolean noInit) {} - + private UserPermission(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final UserPermission defaultInstance; public static UserPermission getDefaultInstance() { return defaultInstance; } - + public UserPermission getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private UserPermission( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + user_ = input.readBytes(); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder subBuilder = null; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + subBuilder = permission_.toBuilder(); + } + permission_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(permission_); + permission_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000002; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermission_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermission_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermission_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.class, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public UserPermission parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new UserPermission(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + 
return PARSER; } - + private int bitField0_; // required bytes user = 1; public static final int USER_FIELD_NUMBER = 1; private com.google.protobuf.ByteString user_; + /** + * required bytes user = 1; + */ public boolean hasUser() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes user = 1; + */ public com.google.protobuf.ByteString getUser() { return user_; } - + // required .Permission permission = 2; public static final int PERMISSION_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission permission_; + /** + * required .Permission permission = 2; + */ public boolean hasPermission() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required .Permission permission = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission getPermission() { return permission_; } + /** + * required .Permission permission = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder getPermissionOrBuilder() { return permission_; } - + private void initFields() { user_ = com.google.protobuf.ByteString.EMPTY; permission_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.getDefaultInstance(); @@ -800,7 +1102,7 @@ public final class AccessControlProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasUser()) { memoizedIsInitialized = 0; return false; @@ -812,7 +1114,7 @@ public final class AccessControlProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -824,12 +1126,12 @@ public final class AccessControlProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -843,14 +1145,14 @@ public final class AccessControlProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -860,7 +1162,7 @@ public final class AccessControlProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission) obj; - + boolean result = true; result = result && (hasUser() == other.hasUser()); if (hasUser()) { @@ -876,9 +1178,13 @@ public final class AccessControlProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasUser()) { @@ -890,89 +1196,79 @@ public final class AccessControlProtos { hash = (53 * hash) + getPermission().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission parseFrom( 
com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission prototype) { return newBuilder().mergeFrom(prototype); } public Builder 
toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code UserPermission} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder { @@ -980,18 +1276,21 @@ public final class AccessControlProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermission_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermission_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermission_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.class, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -1003,7 +1302,7 @@ public final class AccessControlProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); user_ = com.google.protobuf.ByteString.EMPTY; @@ -1016,20 +1315,20 @@ public final class AccessControlProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermission_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission build() { org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission result = buildPartial(); if (!result.isInitialized()) { @@ -1037,17 +1336,7 @@ public final class AccessControlProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission buildPartial() { org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission(this); int from_bitField0_ = 
bitField0_; @@ -1068,7 +1357,7 @@ public final class AccessControlProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission)other); @@ -1077,7 +1366,7 @@ public final class AccessControlProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission other) { if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance()) return this; if (other.hasUser()) { @@ -1089,7 +1378,7 @@ public final class AccessControlProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasUser()) { @@ -1101,58 +1390,43 @@ public final class AccessControlProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - user_ = input.readBytes(); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.newBuilder(); - if (hasPermission()) { - subBuilder.mergeFrom(getPermission()); - } - input.readMessage(subBuilder, extensionRegistry); - setPermission(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required bytes user = 1; private com.google.protobuf.ByteString user_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes user = 1; + */ public boolean hasUser() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes user = 1; + */ public com.google.protobuf.ByteString getUser() { return user_; } + /** + * required bytes user = 1; + */ public Builder setUser(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -1162,20 +1436,29 @@ public final class AccessControlProtos { onChanged(); return this; } + /** + * required bytes user = 1; + */ public Builder clearUser() { bitField0_ = (bitField0_ & ~0x00000001); user_ = getDefaultInstance().getUser(); onChanged(); return this; } - + // required .Permission permission = 2; private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission permission_ = 
org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder> permissionBuilder_; + /** + * required .Permission permission = 2; + */ public boolean hasPermission() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required .Permission permission = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission getPermission() { if (permissionBuilder_ == null) { return permission_; @@ -1183,6 +1466,9 @@ public final class AccessControlProtos { return permissionBuilder_.getMessage(); } } + /** + * required .Permission permission = 2; + */ public Builder setPermission(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission value) { if (permissionBuilder_ == null) { if (value == null) { @@ -1196,6 +1482,9 @@ public final class AccessControlProtos { bitField0_ |= 0x00000002; return this; } + /** + * required .Permission permission = 2; + */ public Builder setPermission( org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder builderForValue) { if (permissionBuilder_ == null) { @@ -1207,6 +1496,9 @@ public final class AccessControlProtos { bitField0_ |= 0x00000002; return this; } + /** + * required .Permission permission = 2; + */ public Builder mergePermission(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission value) { if (permissionBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && @@ -1223,6 +1515,9 @@ public final class AccessControlProtos { bitField0_ |= 0x00000002; return this; } + /** + * required .Permission permission = 2; + */ public Builder clearPermission() { if (permissionBuilder_ == null) { permission_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.getDefaultInstance(); @@ -1233,11 +1528,17 @@ public final class AccessControlProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } + /** + * required .Permission permission = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder getPermissionBuilder() { bitField0_ |= 0x00000002; onChanged(); return getPermissionFieldBuilder().getBuilder(); } + /** + * required .Permission permission = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder getPermissionOrBuilder() { if (permissionBuilder_ != null) { return permissionBuilder_.getMessageOrBuilder(); @@ -1245,6 +1546,9 @@ public final class AccessControlProtos { return permission_; } } + /** + * required .Permission permission = 2; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder> getPermissionFieldBuilder() { @@ -1258,136 +1562,349 @@ public final class AccessControlProtos { } return permissionBuilder_; } - + // @@protoc_insertion_point(builder_scope:UserPermission) } - + static { defaultInstance = new UserPermission(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:UserPermission) } - + public interface UserTablePermissionsOrBuilder extends 
com.google.protobuf.MessageOrBuilder { - + // repeated .UserTablePermissions.UserPermissions permissions = 1; + /** + * repeated .UserTablePermissions.UserPermissions permissions = 1; + */ java.util.List getPermissionsList(); + /** + * repeated .UserTablePermissions.UserPermissions permissions = 1; + */ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions getPermissions(int index); + /** + * repeated .UserTablePermissions.UserPermissions permissions = 1; + */ int getPermissionsCount(); + /** + * repeated .UserTablePermissions.UserPermissions permissions = 1; + */ java.util.List getPermissionsOrBuilderList(); + /** + * repeated .UserTablePermissions.UserPermissions permissions = 1; + */ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissionsOrBuilder getPermissionsOrBuilder( int index); } + /** + * Protobuf type {@code UserTablePermissions} + * + *
+   **
+   * Content of the /hbase/acl/<table> znode.
+   * 
+ */ public static final class UserTablePermissions extends com.google.protobuf.GeneratedMessage implements UserTablePermissionsOrBuilder { // Use UserTablePermissions.newBuilder() to construct. - private UserTablePermissions(Builder builder) { + private UserTablePermissions(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private UserTablePermissions(boolean noInit) {} - + private UserTablePermissions(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final UserTablePermissions defaultInstance; public static UserTablePermissions getDefaultInstance() { return defaultInstance; } - + public UserTablePermissions getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private UserTablePermissions( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + permissions_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + permissions_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.PARSER, extensionRegistry)); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + permissions_ = java.util.Collections.unmodifiableList(permissions_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserTablePermissions_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserTablePermissions_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserTablePermissions_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.class, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public UserTablePermissions parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return new UserTablePermissions(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + public interface UserPermissionsOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required bytes user = 1; + /** + * required bytes user = 1; + */ boolean hasUser(); + /** + * required bytes user = 1; + */ com.google.protobuf.ByteString getUser(); - + // repeated .Permission permissions = 2; + /** + * repeated .Permission permissions = 2; + */ java.util.List getPermissionsList(); + /** + * repeated .Permission permissions = 2; + */ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission getPermissions(int index); + /** + * repeated .Permission permissions = 2; + */ int getPermissionsCount(); + /** + * repeated .Permission permissions = 2; + */ java.util.List getPermissionsOrBuilderList(); + /** + * repeated .Permission permissions = 2; + */ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder getPermissionsOrBuilder( int index); } + /** + * Protobuf type {@code UserTablePermissions.UserPermissions} + */ public static final class UserPermissions extends com.google.protobuf.GeneratedMessage implements UserPermissionsOrBuilder { // Use UserPermissions.newBuilder() to construct. - private UserPermissions(Builder builder) { + private UserPermissions(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private UserPermissions(boolean noInit) {} - + private UserPermissions(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final UserPermissions defaultInstance; public static UserPermissions getDefaultInstance() { return defaultInstance; } - + public UserPermissions getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private UserPermissions( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + user_ = input.readBytes(); + break; + } + case 18: { + if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + permissions_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000002; + } + permissions_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.PARSER, extensionRegistry)); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + permissions_ = 
java.util.Collections.unmodifiableList(permissions_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserTablePermissions_UserPermissions_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserTablePermissions_UserPermissions_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserTablePermissions_UserPermissions_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.class, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public UserPermissions parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new UserPermissions(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required bytes user = 1; public static final int USER_FIELD_NUMBER = 1; private com.google.protobuf.ByteString user_; + /** + * required bytes user = 1; + */ public boolean hasUser() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes user = 1; + */ public com.google.protobuf.ByteString getUser() { return user_; } - + // repeated .Permission permissions = 2; public static final int PERMISSIONS_FIELD_NUMBER = 2; private java.util.List permissions_; + /** + * repeated .Permission permissions = 2; + */ public java.util.List getPermissionsList() { return permissions_; } + /** + * repeated .Permission permissions = 2; + */ public java.util.List getPermissionsOrBuilderList() { return permissions_; } + /** + * repeated .Permission permissions = 2; + */ public int getPermissionsCount() { return permissions_.size(); } + /** + * repeated .Permission permissions = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission getPermissions(int index) { return permissions_.get(index); } + /** + * repeated .Permission permissions = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder getPermissionsOrBuilder( int index) { return permissions_.get(index); } - + private void initFields() { user_ = com.google.protobuf.ByteString.EMPTY; permissions_ = java.util.Collections.emptyList(); @@ -1396,7 +1913,7 @@ public final class AccessControlProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasUser()) { memoizedIsInitialized = 0; return false; @@ -1404,7 +1921,7 @@ public final class AccessControlProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -1416,12 +1933,12 @@ public final class AccessControlProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; 
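Editorial aside on the pattern in the hunks above, since it repeats for every message in this file: protobuf 2.5 moves parsing out of Builder.mergeFrom and into a private constructor that walks the wire format itself, with the static PARSER as its public entry point. Each case label in the generated switch is a wire tag, computed as (field_number << 3) | wire_type, so case 10 is field 1 and case 18 is field 2, both length-delimited. Below is a minimal standalone sketch of that same loop over a raw CodedInputStream; the class name is made up, and the hand-built bytes mirror a UserPermissions message whose user is "alice".

    import com.google.protobuf.ByteString;
    import com.google.protobuf.CodedInputStream;
    import java.io.IOException;

    public class TagLoopSketch {
      public static void main(String[] args) throws IOException {
        // Hand-assembled wire bytes for UserPermissions { user: "alice" }:
        // 0x0A = tag (field 1 << 3 | wire type 2, length-delimited),
        // then the length 5, then the five bytes of "alice".
        byte[] wire = {0x0A, 5, 'a', 'l', 'i', 'c', 'e'};
        CodedInputStream input = CodedInputStream.newInstance(wire);
        ByteString user = null;
        boolean done = false;
        while (!done) {
          int tag = input.readTag();   // 0 means end of input
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:                   // field 1 (user), length-delimited
              user = input.readBytes();
              break;
            default:                   // analogous to parseUnknownField,
              if (!input.skipField(tag)) { // minus recording the bytes
                done = true;
              }
              break;
          }
        }
        System.out.println(user.toStringUtf8()); // prints: alice
      }
    }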
public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -1435,14 +1952,14 @@ public final class AccessControlProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -1452,7 +1969,7 @@ public final class AccessControlProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions) obj; - + boolean result = true; result = result && (hasUser() == other.hasUser()); if (hasUser()) { @@ -1465,9 +1982,13 @@ public final class AccessControlProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasUser()) { @@ -1479,89 +2000,79 @@ public final class AccessControlProtos { hash = (53 * hash) + getPermissionsList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - 
.buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code UserTablePermissions.UserPermissions} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissionsOrBuilder { @@ -1569,18 +2080,21 @@ public final class AccessControlProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserTablePermissions_UserPermissions_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserTablePermissions_UserPermissions_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserTablePermissions_UserPermissions_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.class, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + 
private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -1592,7 +2106,7 @@ public final class AccessControlProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); user_ = com.google.protobuf.ByteString.EMPTY; @@ -1605,20 +2119,20 @@ public final class AccessControlProtos { } return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserTablePermissions_UserPermissions_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions build() { org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions result = buildPartial(); if (!result.isInitialized()) { @@ -1626,17 +2140,7 @@ public final class AccessControlProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions buildPartial() { org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions(this); int from_bitField0_ = bitField0_; @@ -1658,7 +2162,7 @@ public final class AccessControlProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions)other); @@ -1667,7 +2171,7 @@ public final class AccessControlProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions other) { if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.getDefaultInstance()) return this; if (other.hasUser()) { @@ -1702,7 +2206,7 @@ public final class AccessControlProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasUser()) { @@ -1710,55 +2214,43 @@ public final class AccessControlProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - 
com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - user_ = input.readBytes(); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addPermissions(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required bytes user = 1; private com.google.protobuf.ByteString user_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes user = 1; + */ public boolean hasUser() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes user = 1; + */ public com.google.protobuf.ByteString getUser() { return user_; } + /** + * required bytes user = 1; + */ public Builder setUser(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -1768,13 +2260,16 @@ public final class AccessControlProtos { onChanged(); return this; } + /** + * required bytes user = 1; + */ public Builder clearUser() { bitField0_ = (bitField0_ & ~0x00000001); user_ = getDefaultInstance().getUser(); onChanged(); return this; } - + // repeated .Permission permissions = 2; private java.util.List permissions_ = java.util.Collections.emptyList(); @@ -1784,10 +2279,13 @@ public final class AccessControlProtos { bitField0_ |= 0x00000002; } } - + private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder> permissionsBuilder_; - + + /** + * repeated .Permission permissions = 2; + */ public java.util.List getPermissionsList() { if (permissionsBuilder_ == null) { return java.util.Collections.unmodifiableList(permissions_); @@ -1795,6 +2293,9 @@ public final class AccessControlProtos { return permissionsBuilder_.getMessageList(); } } + /** + * repeated .Permission permissions = 2; + */ public int getPermissionsCount() { if (permissionsBuilder_ == null) { return permissions_.size(); @@ -1802,6 +2303,9 @@ public final class AccessControlProtos { return permissionsBuilder_.getCount(); } } + /** + * repeated .Permission permissions = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission getPermissions(int index) { if (permissionsBuilder_ == null) { return permissions_.get(index); @@ -1809,6 +2313,9 @@ public final class 
AccessControlProtos { return permissionsBuilder_.getMessage(index); } } + /** + * repeated .Permission permissions = 2; + */ public Builder setPermissions( int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission value) { if (permissionsBuilder_ == null) { @@ -1823,6 +2330,9 @@ public final class AccessControlProtos { } return this; } + /** + * repeated .Permission permissions = 2; + */ public Builder setPermissions( int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder builderForValue) { if (permissionsBuilder_ == null) { @@ -1834,6 +2344,9 @@ public final class AccessControlProtos { } return this; } + /** + * repeated .Permission permissions = 2; + */ public Builder addPermissions(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission value) { if (permissionsBuilder_ == null) { if (value == null) { @@ -1847,6 +2360,9 @@ public final class AccessControlProtos { } return this; } + /** + * repeated .Permission permissions = 2; + */ public Builder addPermissions( int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission value) { if (permissionsBuilder_ == null) { @@ -1861,6 +2377,9 @@ public final class AccessControlProtos { } return this; } + /** + * repeated .Permission permissions = 2; + */ public Builder addPermissions( org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder builderForValue) { if (permissionsBuilder_ == null) { @@ -1872,6 +2391,9 @@ public final class AccessControlProtos { } return this; } + /** + * repeated .Permission permissions = 2; + */ public Builder addPermissions( int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder builderForValue) { if (permissionsBuilder_ == null) { @@ -1883,6 +2405,9 @@ public final class AccessControlProtos { } return this; } + /** + * repeated .Permission permissions = 2; + */ public Builder addAllPermissions( java.lang.Iterable values) { if (permissionsBuilder_ == null) { @@ -1894,6 +2419,9 @@ public final class AccessControlProtos { } return this; } + /** + * repeated .Permission permissions = 2; + */ public Builder clearPermissions() { if (permissionsBuilder_ == null) { permissions_ = java.util.Collections.emptyList(); @@ -1904,6 +2432,9 @@ public final class AccessControlProtos { } return this; } + /** + * repeated .Permission permissions = 2; + */ public Builder removePermissions(int index) { if (permissionsBuilder_ == null) { ensurePermissionsIsMutable(); @@ -1914,10 +2445,16 @@ public final class AccessControlProtos { } return this; } + /** + * repeated .Permission permissions = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder getPermissionsBuilder( int index) { return getPermissionsFieldBuilder().getBuilder(index); } + /** + * repeated .Permission permissions = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder getPermissionsOrBuilder( int index) { if (permissionsBuilder_ == null) { @@ -1925,6 +2462,9 @@ public final class AccessControlProtos { return permissionsBuilder_.getMessageOrBuilder(index); } } + /** + * repeated .Permission permissions = 2; + */ public java.util.List getPermissionsOrBuilderList() { if (permissionsBuilder_ != null) { @@ -1933,15 +2473,24 @@ public final class AccessControlProtos { return java.util.Collections.unmodifiableList(permissions_); } } + /** + * repeated .Permission permissions = 2; + */ public 
org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder addPermissionsBuilder() { return getPermissionsFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.getDefaultInstance()); } + /** + * repeated .Permission permissions = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder addPermissionsBuilder( int index) { return getPermissionsFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.getDefaultInstance()); } + /** + * repeated .Permission permissions = 2; + */ public java.util.List getPermissionsBuilderList() { return getPermissionsFieldBuilder().getBuilderList(); @@ -1960,39 +2509,54 @@ public final class AccessControlProtos { } return permissionsBuilder_; } - + // @@protoc_insertion_point(builder_scope:UserTablePermissions.UserPermissions) } - + static { defaultInstance = new UserPermissions(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:UserTablePermissions.UserPermissions) } - + // repeated .UserTablePermissions.UserPermissions permissions = 1; public static final int PERMISSIONS_FIELD_NUMBER = 1; private java.util.List permissions_; + /** + * repeated .UserTablePermissions.UserPermissions permissions = 1; + */ public java.util.List getPermissionsList() { return permissions_; } + /** + * repeated .UserTablePermissions.UserPermissions permissions = 1; + */ public java.util.List getPermissionsOrBuilderList() { return permissions_; } + /** + * repeated .UserTablePermissions.UserPermissions permissions = 1; + */ public int getPermissionsCount() { return permissions_.size(); } + /** + * repeated .UserTablePermissions.UserPermissions permissions = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions getPermissions(int index) { return permissions_.get(index); } + /** + * repeated .UserTablePermissions.UserPermissions permissions = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissionsOrBuilder getPermissionsOrBuilder( int index) { return permissions_.get(index); } - + private void initFields() { permissions_ = java.util.Collections.emptyList(); } @@ -2000,7 +2564,7 @@ public final class AccessControlProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + for (int i = 0; i < getPermissionsCount(); i++) { if (!getPermissions(i).isInitialized()) { memoizedIsInitialized = 0; @@ -2010,7 +2574,7 @@ public final class AccessControlProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -2019,12 +2583,12 @@ public final class AccessControlProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; for (int i = 0; i < permissions_.size(); i++) { size += com.google.protobuf.CodedOutputStream @@ -2034,14 +2598,14 @@ public final class AccessControlProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean 
equals(final java.lang.Object obj) { if (obj == this) { @@ -2051,7 +2615,7 @@ public final class AccessControlProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions) obj; - + boolean result = true; result = result && getPermissionsList() .equals(other.getPermissionsList()); @@ -2059,9 +2623,13 @@ public final class AccessControlProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (getPermissionsCount() > 0) { @@ -2069,89 +2637,84 @@ public final class AccessControlProtos { hash = (53 * hash) + getPermissionsList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code UserTablePermissions} + * + *
+     * <pre>
+     **
+     * Content of the /hbase/acl/<table> znode.
+     * </pre>
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissionsOrBuilder { @@ -2159,18 +2722,21 @@ public final class AccessControlProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserTablePermissions_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserTablePermissions_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserTablePermissions_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.class, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -2182,7 +2748,7 @@ public final class AccessControlProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (permissionsBuilder_ == null) { @@ -2193,20 +2759,20 @@ public final class AccessControlProtos { } return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserTablePermissions_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions build() { org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions result = buildPartial(); if (!result.isInitialized()) { @@ -2214,17 +2780,7 @@ public final class AccessControlProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions buildPartial() { org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions(this); int from_bitField0_ = bitField0_; @@ -2240,7 +2796,7 @@ public final class AccessControlProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof 
org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions)other); @@ -2249,7 +2805,7 @@ public final class AccessControlProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions other) { if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.getDefaultInstance()) return this; if (permissionsBuilder_ == null) { @@ -2281,7 +2837,7 @@ public final class AccessControlProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { for (int i = 0; i < getPermissionsCount(); i++) { if (!getPermissions(i).isInitialized()) { @@ -2291,42 +2847,26 @@ public final class AccessControlProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addPermissions(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // repeated .UserTablePermissions.UserPermissions permissions = 1; private java.util.List permissions_ = java.util.Collections.emptyList(); @@ -2336,10 +2876,13 @@ public final class AccessControlProtos { bitField0_ |= 0x00000001; } } - + private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissionsOrBuilder> permissionsBuilder_; - + + /** + * repeated .UserTablePermissions.UserPermissions permissions = 1; + */ public java.util.List getPermissionsList() { if (permissionsBuilder_ == null) { return java.util.Collections.unmodifiableList(permissions_); @@ -2347,6 +2890,9 @@ public final class AccessControlProtos { return permissionsBuilder_.getMessageList(); } } + /** + * repeated .UserTablePermissions.UserPermissions permissions = 1; + */ public int getPermissionsCount() { if (permissionsBuilder_ == null) { return 
permissions_.size(); @@ -2354,6 +2900,9 @@ public final class AccessControlProtos { return permissionsBuilder_.getCount(); } } + /** + * repeated .UserTablePermissions.UserPermissions permissions = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions getPermissions(int index) { if (permissionsBuilder_ == null) { return permissions_.get(index); @@ -2361,6 +2910,9 @@ public final class AccessControlProtos { return permissionsBuilder_.getMessage(index); } } + /** + * repeated .UserTablePermissions.UserPermissions permissions = 1; + */ public Builder setPermissions( int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions value) { if (permissionsBuilder_ == null) { @@ -2375,6 +2927,9 @@ public final class AccessControlProtos { } return this; } + /** + * repeated .UserTablePermissions.UserPermissions permissions = 1; + */ public Builder setPermissions( int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.Builder builderForValue) { if (permissionsBuilder_ == null) { @@ -2386,6 +2941,9 @@ public final class AccessControlProtos { } return this; } + /** + * repeated .UserTablePermissions.UserPermissions permissions = 1; + */ public Builder addPermissions(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions value) { if (permissionsBuilder_ == null) { if (value == null) { @@ -2399,6 +2957,9 @@ public final class AccessControlProtos { } return this; } + /** + * repeated .UserTablePermissions.UserPermissions permissions = 1; + */ public Builder addPermissions( int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions value) { if (permissionsBuilder_ == null) { @@ -2413,6 +2974,9 @@ public final class AccessControlProtos { } return this; } + /** + * repeated .UserTablePermissions.UserPermissions permissions = 1; + */ public Builder addPermissions( org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.Builder builderForValue) { if (permissionsBuilder_ == null) { @@ -2424,6 +2988,9 @@ public final class AccessControlProtos { } return this; } + /** + * repeated .UserTablePermissions.UserPermissions permissions = 1; + */ public Builder addPermissions( int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.Builder builderForValue) { if (permissionsBuilder_ == null) { @@ -2435,6 +3002,9 @@ public final class AccessControlProtos { } return this; } + /** + * repeated .UserTablePermissions.UserPermissions permissions = 1; + */ public Builder addAllPermissions( java.lang.Iterable values) { if (permissionsBuilder_ == null) { @@ -2446,6 +3016,9 @@ public final class AccessControlProtos { } return this; } + /** + * repeated .UserTablePermissions.UserPermissions permissions = 1; + */ public Builder clearPermissions() { if (permissionsBuilder_ == null) { permissions_ = java.util.Collections.emptyList(); @@ -2456,6 +3029,9 @@ public final class AccessControlProtos { } return this; } + /** + * repeated .UserTablePermissions.UserPermissions permissions = 1; + */ public Builder removePermissions(int index) { if (permissionsBuilder_ == null) { ensurePermissionsIsMutable(); @@ -2466,10 +3042,16 @@ public final class AccessControlProtos { } return this; } + /** + * repeated .UserTablePermissions.UserPermissions permissions = 1; + */ public 
org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.Builder getPermissionsBuilder( int index) { return getPermissionsFieldBuilder().getBuilder(index); } + /** + * repeated .UserTablePermissions.UserPermissions permissions = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissionsOrBuilder getPermissionsOrBuilder( int index) { if (permissionsBuilder_ == null) { @@ -2477,6 +3059,9 @@ public final class AccessControlProtos { return permissionsBuilder_.getMessageOrBuilder(index); } } + /** + * repeated .UserTablePermissions.UserPermissions permissions = 1; + */ public java.util.List getPermissionsOrBuilderList() { if (permissionsBuilder_ != null) { @@ -2485,15 +3070,24 @@ public final class AccessControlProtos { return java.util.Collections.unmodifiableList(permissions_); } } + /** + * repeated .UserTablePermissions.UserPermissions permissions = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.Builder addPermissionsBuilder() { return getPermissionsFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.getDefaultInstance()); } + /** + * repeated .UserTablePermissions.UserPermissions permissions = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.Builder addPermissionsBuilder( int index) { return getPermissionsFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.getDefaultInstance()); } + /** + * repeated .UserTablePermissions.UserPermissions permissions = 1; + */ public java.util.List getPermissionsBuilderList() { return getPermissionsFieldBuilder().getBuilderList(); @@ -2512,68 +3106,161 @@ public final class AccessControlProtos { } return permissionsBuilder_; } - + // @@protoc_insertion_point(builder_scope:UserTablePermissions) } - + static { defaultInstance = new UserTablePermissions(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:UserTablePermissions) } - + public interface GrantRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .UserPermission permission = 1; + /** + * required .UserPermission permission = 1; + */ boolean hasPermission(); + /** + * required .UserPermission permission = 1; + */ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission getPermission(); + /** + * required .UserPermission permission = 1; + */ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder getPermissionOrBuilder(); } + /** + * Protobuf type {@code GrantRequest} + */ public static final class GrantRequest extends com.google.protobuf.GeneratedMessage implements GrantRequestOrBuilder { // Use GrantRequest.newBuilder() to construct. 
- private GrantRequest(Builder builder) { + private GrantRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private GrantRequest(boolean noInit) {} - + private GrantRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final GrantRequest defaultInstance; public static GrantRequest getDefaultInstance() { return defaultInstance; } - + public GrantRequest getDefaultInstanceForType() { return defaultInstance; } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private GrantRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = permission_.toBuilder(); + } + permission_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(permission_); + permission_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_GrantRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_GrantRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_GrantRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest.class, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public GrantRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GrantRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - 
+ private int bitField0_; // required .UserPermission permission = 1; public static final int PERMISSION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission permission_; + /** + * required .UserPermission permission = 1; + */ public boolean hasPermission() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .UserPermission permission = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission getPermission() { return permission_; } + /** + * required .UserPermission permission = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder getPermissionOrBuilder() { return permission_; } - + private void initFields() { permission_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance(); } @@ -2581,7 +3268,7 @@ public final class AccessControlProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasPermission()) { memoizedIsInitialized = 0; return false; @@ -2593,7 +3280,7 @@ public final class AccessControlProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -2602,12 +3289,12 @@ public final class AccessControlProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -2617,14 +3304,14 @@ public final class AccessControlProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -2634,7 +3321,7 @@ public final class AccessControlProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest) obj; - + boolean result = true; result = result && (hasPermission() == other.hasPermission()); if (hasPermission()) { @@ -2645,9 +3332,13 @@ public final class AccessControlProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasPermission()) { @@ -2655,89 +3346,79 @@ public final class AccessControlProtos { hash = (53 * hash) + getPermission().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code GrantRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements 
org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequestOrBuilder { @@ -2745,18 +3426,21 @@ public final class AccessControlProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_GrantRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_GrantRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_GrantRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest.class, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -2768,7 +3452,7 @@ public final class AccessControlProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (permissionBuilder_ == null) { @@ -2779,20 +3463,20 @@ public final class AccessControlProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_GrantRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest build() { org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest result = buildPartial(); if (!result.isInitialized()) { @@ -2800,17 +3484,7 @@ public final class AccessControlProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest(this); int from_bitField0_ = bitField0_; @@ -2827,7 +3501,7 @@ public final class AccessControlProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest)other); @@ -2836,7 +3510,7 @@ public final class 
AccessControlProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest.getDefaultInstance()) return this; if (other.hasPermission()) { @@ -2845,7 +3519,7 @@ public final class AccessControlProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasPermission()) { @@ -2857,52 +3531,39 @@ public final class AccessControlProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.newBuilder(); - if (hasPermission()) { - subBuilder.mergeFrom(getPermission()); - } - input.readMessage(subBuilder, extensionRegistry); - setPermission(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .UserPermission permission = 1; private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission permission_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder> permissionBuilder_; + /** + * required .UserPermission permission = 1; + */ public boolean hasPermission() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .UserPermission permission = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission getPermission() { if (permissionBuilder_ == null) { return permission_; @@ -2910,6 +3571,9 @@ public final class AccessControlProtos { return permissionBuilder_.getMessage(); } } + /** + * required .UserPermission permission = 1; + */ public Builder setPermission(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission value) { if (permissionBuilder_ == null) { if (value == null) { @@ -2923,6 +3587,9 @@ public final class AccessControlProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .UserPermission permission = 1; + */ public Builder setPermission( 
org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder builderForValue) { if (permissionBuilder_ == null) { @@ -2934,6 +3601,9 @@ public final class AccessControlProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .UserPermission permission = 1; + */ public Builder mergePermission(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission value) { if (permissionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -2950,6 +3620,9 @@ public final class AccessControlProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .UserPermission permission = 1; + */ public Builder clearPermission() { if (permissionBuilder_ == null) { permission_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance(); @@ -2960,11 +3633,17 @@ public final class AccessControlProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * required .UserPermission permission = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder getPermissionBuilder() { bitField0_ |= 0x00000001; onChanged(); return getPermissionFieldBuilder().getBuilder(); } + /** + * required .UserPermission permission = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder getPermissionOrBuilder() { if (permissionBuilder_ != null) { return permissionBuilder_.getMessageOrBuilder(); @@ -2972,6 +3651,9 @@ public final class AccessControlProtos { return permission_; } } + /** + * required .UserPermission permission = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder> getPermissionFieldBuilder() { @@ -2985,84 +3667,145 @@ public final class AccessControlProtos { } return permissionBuilder_; } - + // @@protoc_insertion_point(builder_scope:GrantRequest) } - + static { defaultInstance = new GrantRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:GrantRequest) } - + public interface GrantResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code GrantResponse} + */ public static final class GrantResponse extends com.google.protobuf.GeneratedMessage implements GrantResponseOrBuilder { // Use GrantResponse.newBuilder() to construct. 
- private GrantResponse(Builder builder) { + private GrantResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private GrantResponse(boolean noInit) {} - + private GrantResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final GrantResponse defaultInstance; public static GrantResponse getDefaultInstance() { return defaultInstance; } - + public GrantResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private GrantResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_GrantResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_GrantResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_GrantResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse.class, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public GrantResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GrantResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; size += 
getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -3072,101 +3815,95 @@ public final class AccessControlProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse) obj; - + boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder 
builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code GrantResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponseOrBuilder { @@ -3174,18 +3911,21 @@ public final class AccessControlProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_GrantResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_GrantResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_GrantResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse.class, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -3196,25 +3936,25 @@ public final class AccessControlProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_GrantResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse.getDefaultInstance(); } - + public 
org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse build() { org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse result = buildPartial(); if (!result.isInitialized()) { @@ -3222,23 +3962,13 @@ public final class AccessControlProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse(this); onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse)other); @@ -3247,106 +3977,189 @@ public final class AccessControlProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - - + // @@protoc_insertion_point(builder_scope:GrantResponse) } - + static { defaultInstance = new GrantResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:GrantResponse) } - + public interface RevokeRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .UserPermission permission = 1; + /** + * required .UserPermission permission = 1; + */ boolean hasPermission(); + /** + * required .UserPermission permission = 1; + */ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission getPermission(); + /** + * required .UserPermission permission = 1; + */ 
org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder getPermissionOrBuilder(); } + /** + * Protobuf type {@code RevokeRequest} + */ public static final class RevokeRequest extends com.google.protobuf.GeneratedMessage implements RevokeRequestOrBuilder { // Use RevokeRequest.newBuilder() to construct. - private RevokeRequest(Builder builder) { + private RevokeRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private RevokeRequest(boolean noInit) {} - + private RevokeRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final RevokeRequest defaultInstance; public static RevokeRequest getDefaultInstance() { return defaultInstance; } - + public RevokeRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private RevokeRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = permission_.toBuilder(); + } + permission_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(permission_); + permission_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_RevokeRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_RevokeRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_RevokeRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest.class, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public RevokeRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RevokeRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required .UserPermission permission = 1; public static final int PERMISSION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission permission_; + /** + * required .UserPermission permission = 1; + */ public boolean hasPermission() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .UserPermission permission = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission getPermission() { return permission_; } + /** + * required .UserPermission permission = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder getPermissionOrBuilder() { return permission_; } - + private void initFields() { permission_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance(); } @@ -3354,7 +4167,7 @@ public final class AccessControlProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasPermission()) { memoizedIsInitialized = 0; return false; @@ -3366,7 +4179,7 @@ public final class AccessControlProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -3375,12 +4188,12 @@ public final class AccessControlProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -3390,14 +4203,14 @@ public final class AccessControlProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -3407,7 +4220,7 @@ public final class AccessControlProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest) obj; - + boolean result = true; result = result && (hasPermission() == other.hasPermission()); if (hasPermission()) { @@ -3418,9 +4231,13 @@ public final class AccessControlProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasPermission()) { @@ -3428,89 +4245,79 @@ public final class AccessControlProtos { hash = (53 * hash) + getPermission().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest parseFrom( com.google.protobuf.ByteString data) throws 
com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + 
@java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code RevokeRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequestOrBuilder { @@ -3518,18 +4325,21 @@ public final class AccessControlProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_RevokeRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_RevokeRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_RevokeRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest.class, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -3541,7 +4351,7 @@ public final class AccessControlProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (permissionBuilder_ == null) { @@ -3552,20 +4362,20 @@ public final class AccessControlProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_RevokeRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest build() { org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest result = buildPartial(); if (!result.isInitialized()) { @@ -3573,17 +4383,7 @@ public final class AccessControlProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest(this); int from_bitField0_ = bitField0_; @@ -3600,7 +4400,7 @@ public final class AccessControlProtos { 
onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest)other); @@ -3609,7 +4409,7 @@ public final class AccessControlProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest.getDefaultInstance()) return this; if (other.hasPermission()) { @@ -3618,7 +4418,7 @@ public final class AccessControlProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasPermission()) { @@ -3630,52 +4430,39 @@ public final class AccessControlProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.newBuilder(); - if (hasPermission()) { - subBuilder.mergeFrom(getPermission()); - } - input.readMessage(subBuilder, extensionRegistry); - setPermission(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .UserPermission permission = 1; private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission permission_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder> permissionBuilder_; + /** + * required .UserPermission permission = 1; + */ public boolean hasPermission() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .UserPermission permission = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission getPermission() { if (permissionBuilder_ == null) { return permission_; @@ -3683,6 +4470,9 @@ public final class AccessControlProtos { return permissionBuilder_.getMessage(); } } + /** + * required .UserPermission permission = 1; + */ public Builder 
setPermission(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission value) { if (permissionBuilder_ == null) { if (value == null) { @@ -3696,6 +4486,9 @@ public final class AccessControlProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .UserPermission permission = 1; + */ public Builder setPermission( org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder builderForValue) { if (permissionBuilder_ == null) { @@ -3707,6 +4500,9 @@ public final class AccessControlProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .UserPermission permission = 1; + */ public Builder mergePermission(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission value) { if (permissionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -3723,6 +4519,9 @@ public final class AccessControlProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .UserPermission permission = 1; + */ public Builder clearPermission() { if (permissionBuilder_ == null) { permission_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance(); @@ -3733,11 +4532,17 @@ public final class AccessControlProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * required .UserPermission permission = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder getPermissionBuilder() { bitField0_ |= 0x00000001; onChanged(); return getPermissionFieldBuilder().getBuilder(); } + /** + * required .UserPermission permission = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder getPermissionOrBuilder() { if (permissionBuilder_ != null) { return permissionBuilder_.getMessageOrBuilder(); @@ -3745,6 +4550,9 @@ public final class AccessControlProtos { return permission_; } } + /** + * required .UserPermission permission = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder> getPermissionFieldBuilder() { @@ -3758,84 +4566,145 @@ public final class AccessControlProtos { } return permissionBuilder_; } - + // @@protoc_insertion_point(builder_scope:RevokeRequest) } - + static { defaultInstance = new RevokeRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:RevokeRequest) } - + public interface RevokeResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code RevokeResponse} + */ public static final class RevokeResponse extends com.google.protobuf.GeneratedMessage implements RevokeResponseOrBuilder { // Use RevokeResponse.newBuilder() to construct. 
- private RevokeResponse(Builder builder) { + private RevokeResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private RevokeResponse(boolean noInit) {} - + private RevokeResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final RevokeResponse defaultInstance; public static RevokeResponse getDefaultInstance() { return defaultInstance; } - + public RevokeResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private RevokeResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_RevokeResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_RevokeResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_RevokeResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.class, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public RevokeResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RevokeResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; size += 
getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -3845,101 +4714,95 @@ public final class AccessControlProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse) obj; - + boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - 
Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code RevokeResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponseOrBuilder { @@ -3947,18 +4810,21 @@ public final class AccessControlProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_RevokeResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_RevokeResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_RevokeResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.class, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -3969,25 +4835,25 @@ public final class AccessControlProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_RevokeResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.getDefaultInstance(); } - + public 
org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse build() { org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse result = buildPartial(); if (!result.isInitialized()) { @@ -3995,23 +4861,13 @@ public final class AccessControlProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse(this); onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse)other); @@ -4020,102 +4876,171 @@ public final class AccessControlProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - - + // @@protoc_insertion_point(builder_scope:RevokeResponse) } - + static { defaultInstance = new RevokeResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:RevokeResponse) } - + public interface UserPermissionsRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // optional bytes table = 1; + /** + * optional bytes table = 1; + */ boolean hasTable(); + /** + * optional bytes table = 1; + */ com.google.protobuf.ByteString getTable(); } + /** + * Protobuf type {@code UserPermissionsRequest} + */ public static final class UserPermissionsRequest extends com.google.protobuf.GeneratedMessage implements 
+  /**
+   * Protobuf type {@code UserPermissionsRequest}
+   */
   public static final class UserPermissionsRequest extends
       com.google.protobuf.GeneratedMessage
       implements UserPermissionsRequestOrBuilder {
     // Use UserPermissionsRequest.newBuilder() to construct.
-    private UserPermissionsRequest(Builder builder) {
+    private UserPermissionsRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
       super(builder);
+      this.unknownFields = builder.getUnknownFields();
     }
-    private UserPermissionsRequest(boolean noInit) {}
-    
+    private UserPermissionsRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
     private static final UserPermissionsRequest defaultInstance;
     public static UserPermissionsRequest getDefaultInstance() {
       return defaultInstance;
     }
-    
+
     public UserPermissionsRequest getDefaultInstanceForType() {
       return defaultInstance;
     }
-    
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private UserPermissionsRequest(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 10: {
+              bitField0_ |= 0x00000001;
+              table_ = input.readBytes();
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
       return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermissionsRequest_descriptor;
     }
-    
+
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermissionsRequest_fieldAccessorTable;
+      return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermissionsRequest_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest.class, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<UserPermissionsRequest> PARSER =
+        new com.google.protobuf.AbstractParser<UserPermissionsRequest>() {
+      public UserPermissionsRequest parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new UserPermissionsRequest(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<UserPermissionsRequest> getParserForType() {
+      return PARSER;
     }
-    
+
     private int bitField0_;
     // optional bytes table = 1;
     public static final int TABLE_FIELD_NUMBER = 1;
     private com.google.protobuf.ByteString table_;
+    /**
+     * <code>optional bytes table = 1;</code>
+     */
     public boolean hasTable() {
       return ((bitField0_ & 0x00000001) == 0x00000001);
     }
+    /**
+     * <code>optional bytes table = 1;</code>
+     */
     public com.google.protobuf.ByteString getTable() {
       return table_;
     }
-    
+
     private void initFields() {
       table_ = com.google.protobuf.ByteString.EMPTY;
     }
@@ -4123,11 +5048,11 @@ public final class AccessControlProtos {
     public final boolean isInitialized() {
       byte isInitialized = memoizedIsInitialized;
       if (isInitialized != -1) return isInitialized == 1;
-      
+
       memoizedIsInitialized = 1;
       return true;
     }
-    
+
     public void writeTo(com.google.protobuf.CodedOutputStream output)
                         throws java.io.IOException {
       getSerializedSize();
@@ -4136,12 +5061,12 @@ public final class AccessControlProtos {
       }
       getUnknownFields().writeTo(output);
     }
-    
+
     private int memoizedSerializedSize = -1;
     public int getSerializedSize() {
       int size = memoizedSerializedSize;
       if (size != -1) return size;
-      
+
       size = 0;
       if (((bitField0_ & 0x00000001) == 0x00000001)) {
         size += com.google.protobuf.CodedOutputStream
@@ -4151,14 +5076,14 @@ public final class AccessControlProtos {
       memoizedSerializedSize = size;
       return size;
     }
-    
+
     private static final long serialVersionUID = 0L;
     @java.lang.Override
     protected java.lang.Object writeReplace()
         throws java.io.ObjectStreamException {
       return super.writeReplace();
     }
-    
+
     @java.lang.Override
     public boolean equals(final java.lang.Object obj) {
       if (obj == this) {
@@ -4168,7 +5093,7 @@ public final class AccessControlProtos {
         return super.equals(obj);
       }
       org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest) obj;
-      
+
       boolean result = true;
       result = result && (hasTable() == other.hasTable());
       if (hasTable()) {
@@ -4179,9 +5104,13 @@ public final class AccessControlProtos {
           getUnknownFields().equals(other.getUnknownFields());
       return result;
     }
-    
+
+    private int memoizedHashCode = 0;
     @java.lang.Override
     public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
       int hash = 41;
       hash = (19 * hash) + getDescriptorForType().hashCode();
       if (hasTable()) {
@@ -4189,89 +5118,79 @@ public final class AccessControlProtos {
         hash = (53 * hash) + getTable().hashCode();
       }
       hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
       return hash;
     }
-    
+
     public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parseFrom(
         com.google.protobuf.ByteString data)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parseFrom(
         com.google.protobuf.ByteString data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-          .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parseFrom(byte[] data)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parseFrom(
         byte[] data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-          .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parseFrom(java.io.InputStream input)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parseFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-          .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parseDelimitedFrom(java.io.InputStream input)
         throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parseDelimitedFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parseFrom(
         com.google.protobuf.CodedInputStream input)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parseFrom(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-          .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
     }
-    
+
     public static Builder newBuilder() { return Builder.create(); }
     public Builder newBuilderForType() { return newBuilder(); }
     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest prototype) {
       return newBuilder().mergeFrom(prototype);
     }
     public Builder toBuilder() { return newBuilder(this); }
-    
+
     @java.lang.Override
     protected Builder newBuilderForType(
         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
       Builder builder = new Builder(parent);
       return builder;
     }
+    /**
+     * Protobuf type {@code UserPermissionsRequest}
+     */
     public static final class Builder extends
         com.google.protobuf.GeneratedMessage.Builder<Builder>
        implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequestOrBuilder {
@@ -4279,18 +5198,21 @@ public final class AccessControlProtos {
           getDescriptor() {
         return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermissionsRequest_descriptor;
       }
-      
+
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermissionsRequest_fieldAccessorTable;
+        return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermissionsRequest_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest.class, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest.Builder.class);
       }
-      
+
       // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest.newBuilder()
       private Builder() {
         maybeForceBuilderInitialization();
       }
-      
-      private Builder(BuilderParent parent) {
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
         super(parent);
         maybeForceBuilderInitialization();
       }
@@ -4301,27 +5223,27 @@ public final class AccessControlProtos {
       private static Builder create() {
         return new Builder();
       }
-      
+
       public Builder clear() {
         super.clear();
         table_ = com.google.protobuf.ByteString.EMPTY;
         bitField0_ = (bitField0_ & ~0x00000001);
         return this;
       }
-      
+
       public Builder clone() {
         return create().mergeFrom(buildPartial());
       }
-      
+
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest.getDescriptor();
+        return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermissionsRequest_descriptor;
       }
-      
+
       public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest getDefaultInstanceForType() {
         return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest.getDefaultInstance();
       }
-      
+
       public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest build() {
         org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest result = buildPartial();
         if (!result.isInitialized()) {
@@ -4329,17 +5251,7 @@ public final class AccessControlProtos {
         }
         return result;
       }
-      
-      private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest buildParsed()
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(
-            result).asInvalidProtocolBufferException();
-        }
-        return result;
-      }
-      
+
       public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest buildPartial() {
         org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest(this);
         int from_bitField0_ = bitField0_;
@@ -4352,7 +5264,7 @@ public final class AccessControlProtos {
         onBuilt();
         return result;
       }
-      
+
       public Builder mergeFrom(com.google.protobuf.Message other) {
         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest) {
           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest)other);
@@ -4361,7 +5273,7 @@ public final class AccessControlProtos {
           return this;
         }
       }
-      
+
       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest other) {
         if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest.getDefaultInstance()) return this;
         if (other.hasTable()) {
@@ -4370,53 +5282,47 @@ public final class AccessControlProtos {
         this.mergeUnknownFields(other.getUnknownFields());
         return this;
       }
-      
+
       public final boolean isInitialized() {
         return true;
       }
-      
+
       public Builder mergeFrom(
           com.google.protobuf.CodedInputStream input,
           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
           throws java.io.IOException {
-        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder(
-            this.getUnknownFields());
-        while (true) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              this.setUnknownFields(unknownFields.build());
-              onChanged();
-              return this;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                this.setUnknownFields(unknownFields.build());
-                onChanged();
-                return this;
-              }
-              break;
-            }
-            case 10: {
-              bitField0_ |= 0x00000001;
-              table_ = input.readBytes();
-              break;
-            }
-          }
-        }
+        org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
       }
-      
       private int bitField0_;
-      
+
       // optional bytes table = 1;
       private com.google.protobuf.ByteString table_ = com.google.protobuf.ByteString.EMPTY;
+      /**
+       * <code>optional bytes table = 1;</code>
+       */
       public boolean hasTable() {
         return ((bitField0_ & 0x00000001) == 0x00000001);
       }
+      /**
+       * <code>optional bytes table = 1;</code>
+       */
       public com.google.protobuf.ByteString getTable() {
         return table_;
       }
+      /**
+       * <code>optional bytes table = 1;</code>
+       */
       public Builder setTable(com.google.protobuf.ByteString value) {
         if (value == null) {
     throw new NullPointerException();
@@ -4426,86 +5332,192 @@ public final class AccessControlProtos {
         onChanged();
         return this;
       }
+      /**
+       * <code>optional bytes table = 1;</code>
+       */
       public Builder clearTable() {
         bitField0_ = (bitField0_ & ~0x00000001);
         table_ = getDefaultInstance().getTable();
         onChanged();
         return this;
       }
-      
+
       // @@protoc_insertion_point(builder_scope:UserPermissionsRequest)
     }
-    
+
     static {
       defaultInstance = new UserPermissionsRequest(true);
       defaultInstance.initFields();
     }
-    
+
     // @@protoc_insertion_point(class_scope:UserPermissionsRequest)
   }
-  
+
   public interface UserPermissionsResponseOrBuilder
       extends com.google.protobuf.MessageOrBuilder {
-    
+
     // repeated .UserPermission permission = 1;
+    /**
+     * <code>repeated .UserPermission permission = 1;</code>
+     */
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission> 
        getPermissionList();
+    /**
+     * <code>repeated .UserPermission permission = 1;</code>
+     */
    org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission getPermission(int index);
+    /**
+     * <code>repeated .UserPermission permission = 1;</code>
+     */
    int getPermissionCount();
+    /**
+     * <code>repeated .UserPermission permission = 1;</code>
+     */
    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder> 
        getPermissionOrBuilderList();
+    /**
+     * <code>repeated .UserPermission permission = 1;</code>
+     */
    org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder getPermissionOrBuilder(
        int index);
  }
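[Editor's note] UserPermissionsRequest above shows where the parsing work moved: a new private constructor reads the CodedInputStream tag by tag, and on failure attaches the partially read message to the exception via setUnfinishedMessage(this); the rewritten Builder.mergeFrom() then recovers that partial state with e.getUnfinishedMessage() before rethrowing. A hedged sketch of the caller-visible contract; the input stream `in` is an assumed java.io.InputStream, not part of this patch:

    try {
      org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest req =
          org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest
              .PARSER.parseFrom(in);
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      // protobuf 2.5: whatever was successfully read before the failure
      // is carried on the exception (may be null).
      com.google.protobuf.MessageLite partial = e.getUnfinishedMessage();
    }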
+  /**
+   * Protobuf type {@code UserPermissionsResponse}
+   */
   public static final class UserPermissionsResponse extends
       com.google.protobuf.GeneratedMessage
       implements UserPermissionsResponseOrBuilder {
     // Use UserPermissionsResponse.newBuilder() to construct.
-    private UserPermissionsResponse(Builder builder) {
+    private UserPermissionsResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
       super(builder);
+      this.unknownFields = builder.getUnknownFields();
     }
-    private UserPermissionsResponse(boolean noInit) {}
-    
+    private UserPermissionsResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
     private static final UserPermissionsResponse defaultInstance;
     public static UserPermissionsResponse getDefaultInstance() {
       return defaultInstance;
     }
-    
+
     public UserPermissionsResponse getDefaultInstanceForType() {
       return defaultInstance;
     }
-    
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private UserPermissionsResponse(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 10: {
+              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+                permission_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission>();
+                mutable_bitField0_ |= 0x00000001;
+              }
+              permission_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.PARSER, extensionRegistry));
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+          permission_ = java.util.Collections.unmodifiableList(permission_);
+        }
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
       return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermissionsResponse_descriptor;
     }
-    
+
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermissionsResponse_fieldAccessorTable;
+      return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermissionsResponse_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.class, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<UserPermissionsResponse> PARSER =
+        new com.google.protobuf.AbstractParser<UserPermissionsResponse>() {
+      public UserPermissionsResponse parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new UserPermissionsResponse(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<UserPermissionsResponse> getParserForType() {
+      return PARSER;
     }
-    
+
     // repeated .UserPermission permission = 1;
     public static final int PERMISSION_FIELD_NUMBER = 1;
     private java.util.List<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission> permission_;
+    /**
+     * <code>repeated .UserPermission permission = 1;</code>
+     */
     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission> getPermissionList() {
       return permission_;
     }
+    /**
+     * <code>repeated .UserPermission permission = 1;</code>
+     */
     public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder> 
         getPermissionOrBuilderList() {
       return permission_;
     }
+    /**
+     * <code>repeated .UserPermission permission = 1;</code>
+     */
     public int getPermissionCount() {
       return permission_.size();
     }
+    /**
+     * <code>repeated .UserPermission permission = 1;</code>
+     */
     public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission getPermission(int index) {
       return permission_.get(index);
     }
+    /**
+     * <code>repeated .UserPermission permission = 1;</code>
+     */
     public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder getPermissionOrBuilder(
         int index) {
       return permission_.get(index);
     }
-    
+
     private void initFields() {
       permission_ = java.util.Collections.emptyList();
     }
@@ -4513,7 +5525,7 @@ public final class AccessControlProtos {
     public final boolean isInitialized() {
       byte isInitialized = memoizedIsInitialized;
       if (isInitialized != -1) return isInitialized == 1;
-      
+
       for (int i = 0; i < getPermissionCount(); i++) {
         if (!getPermission(i).isInitialized()) {
           memoizedIsInitialized = 0;
@@ -4523,7 +5535,7 @@ public final class AccessControlProtos {
       memoizedIsInitialized = 1;
       return true;
     }
-    
+
     public void writeTo(com.google.protobuf.CodedOutputStream output)
                         throws java.io.IOException {
       getSerializedSize();
@@ -4532,12 +5544,12 @@ public final class AccessControlProtos {
       }
       getUnknownFields().writeTo(output);
     }
-    
+
     private int memoizedSerializedSize = -1;
     public int getSerializedSize() {
       int size = memoizedSerializedSize;
       if (size != -1) return size;
-      
+
       size = 0;
       for (int i = 0; i < permission_.size(); i++) {
         size += com.google.protobuf.CodedOutputStream
@@ -4547,14 +5559,14 @@ public final class AccessControlProtos {
       memoizedSerializedSize = size;
       return size;
     }
-    
+
     private static final long serialVersionUID = 0L;
     @java.lang.Override
     protected java.lang.Object writeReplace()
         throws java.io.ObjectStreamException {
       return super.writeReplace();
     }
-    
+
     @java.lang.Override
     public boolean equals(final java.lang.Object obj) {
       if (obj == this) {
@@ -4564,7 +5576,7 @@ public final class AccessControlProtos {
         return super.equals(obj);
       }
       org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse) obj;
-      
+
       boolean result = true;
       result = result && getPermissionList()
           .equals(other.getPermissionList());
@@ -4572,9 +5584,13 @@ public final class AccessControlProtos {
           getUnknownFields().equals(other.getUnknownFields());
       return result;
     }
-    
+
+    private int memoizedHashCode = 0;
     @java.lang.Override
     public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
       int hash = 41;
       hash = (19 * hash) + getDescriptorForType().hashCode();
       if (getPermissionCount() > 0) {
@@ -4582,89 +5598,79 @@ public final class AccessControlProtos {
         hash = (53 * hash) + getPermissionList().hashCode();
       }
       hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
       return hash;
     }
-    
+
     public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parseFrom(
         com.google.protobuf.ByteString data)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parseFrom(
         com.google.protobuf.ByteString data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-          .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parseFrom(byte[] data)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parseFrom(
         byte[] data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-          .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parseFrom(java.io.InputStream input)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parseFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-          .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parseDelimitedFrom(java.io.InputStream input)
         throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parseDelimitedFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parseFrom(
         com.google.protobuf.CodedInputStream input)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parseFrom(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-          .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
     }
-    
+
     public static Builder newBuilder() { return Builder.create(); }
     public Builder newBuilderForType() { return newBuilder(); }
     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse prototype) {
       return newBuilder().mergeFrom(prototype);
     }
     public Builder toBuilder() { return newBuilder(this); }
-    
+
     @java.lang.Override
     protected Builder newBuilderForType(
         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
       Builder builder = new Builder(parent);
       return builder;
     }
+    /**
+     * Protobuf type {@code UserPermissionsResponse}
+     */
     public static final class Builder extends
         com.google.protobuf.GeneratedMessage.Builder<Builder>
        implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponseOrBuilder {
@@ -4672,18 +5678,21 @@ public final class AccessControlProtos {
           getDescriptor() {
         return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermissionsResponse_descriptor;
       }
-      
+
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermissionsResponse_fieldAccessorTable;
+        return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermissionsResponse_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.class, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.Builder.class);
       }
-      
+
       // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.newBuilder()
       private Builder() {
         maybeForceBuilderInitialization();
       }
-      
-      private Builder(BuilderParent parent) {
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
         super(parent);
         maybeForceBuilderInitialization();
       }
@@ -4695,7 +5704,7 @@ public final class AccessControlProtos {
       private static Builder create() {
         return new Builder();
       }
-      
+
       public Builder clear() {
         super.clear();
         if (permissionBuilder_ == null) {
@@ -4706,20 +5715,20 @@ public final class AccessControlProtos {
         }
         return this;
       }
-      
+
       public Builder clone() {
         return create().mergeFrom(buildPartial());
       }
-      
+
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.getDescriptor();
+        return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermissionsResponse_descriptor;
       }
-      
+
       public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse getDefaultInstanceForType() {
         return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.getDefaultInstance();
       }
-      
+
       public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse build() {
         org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse result = buildPartial();
         if (!result.isInitialized()) {
@@ -4727,17 +5736,7 @@ public final class AccessControlProtos {
         }
         return result;
       }
-      
-      private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse buildParsed()
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(
-            result).asInvalidProtocolBufferException();
-        }
-        return result;
-      }
-      
+
       public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse buildPartial() {
         org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse(this);
         int from_bitField0_ = bitField0_;
@@ -4753,7 +5752,7 @@ public final class AccessControlProtos {
         onBuilt();
         return result;
       }
-      
+
       public Builder mergeFrom(com.google.protobuf.Message other) {
         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse) {
           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse)other);
@@ -4762,7 +5761,7 @@ public final class AccessControlProtos {
           return this;
         }
       }
-      
+
       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse other) {
         if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.getDefaultInstance()) return this;
         if (permissionBuilder_ == null) {
@@ -4794,7 +5793,7 @@ public final class AccessControlProtos {
         this.mergeUnknownFields(other.getUnknownFields());
         return this;
       }
-      
+
       public final boolean isInitialized() {
         for (int i = 0; i < getPermissionCount(); i++) {
           if (!getPermission(i).isInitialized()) {
@@ -4804,42 +5803,26 @@ public final class AccessControlProtos {
         }
         return true;
       }
-      
+
       public Builder mergeFrom(
           com.google.protobuf.CodedInputStream input,
           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
           throws java.io.IOException {
-        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder(
-            this.getUnknownFields());
-        while (true) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              this.setUnknownFields(unknownFields.build());
-              onChanged();
-              return this;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                this.setUnknownFields(unknownFields.build());
-                onChanged();
-                return this;
-              }
-              break;
-            }
-            case 10: {
-              org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.newBuilder();
-              input.readMessage(subBuilder, extensionRegistry);
-              addPermission(subBuilder.buildPartial());
-              break;
-            }
+        org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
          }
        }
+        return this;
      }
-      
      private int bitField0_;
-      
+
      // repeated .UserPermission permission = 1;
      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission> permission_ =
        java.util.Collections.emptyList();
@@ -4849,10 +5832,13 @@ public final class AccessControlProtos {
          bitField0_ |= 0x00000001;
        }
      }
-      
+
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder> permissionBuilder_;
-      
+
+      /**
+       * <code>repeated .UserPermission permission = 1;</code>
+       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission> getPermissionList() {
        if (permissionBuilder_ == null) {
          return java.util.Collections.unmodifiableList(permission_);
@@ -4860,6 +5846,9 @@ public final class AccessControlProtos {
          return permissionBuilder_.getMessageList();
        }
      }
+      /**
+       * <code>repeated .UserPermission permission = 1;</code>
+       */
      public int getPermissionCount() {
        if (permissionBuilder_ == null) {
          return permission_.size();
@@ -4867,6 +5856,9 @@ public final class AccessControlProtos {
          return permissionBuilder_.getCount();
        }
      }
+      /**
+       * <code>repeated .UserPermission permission = 1;</code>
+       */
      public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission getPermission(int index) {
        if (permissionBuilder_ == null) {
          return permission_.get(index);
@@ -4874,6 +5866,9 @@ public final class AccessControlProtos {
          return permissionBuilder_.getMessage(index);
        }
      }
+      /**
+       * <code>repeated .UserPermission permission = 1;</code>
+       */
      public Builder setPermission(
          int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission value) {
        if (permissionBuilder_ == null) {
@@ -4888,6 +5883,9 @@ public final class AccessControlProtos {
        }
        return this;
      }
+      /**
+       * <code>repeated .UserPermission permission = 1;</code>
+       */
      public Builder setPermission(
          int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder builderForValue) {
        if (permissionBuilder_ == null) {
@@ -4899,6 +5897,9 @@ public final class AccessControlProtos {
        }
        return this;
      }
+      /**
+       * <code>repeated .UserPermission permission = 1;</code>
+       */
      public Builder addPermission(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission value) {
        if (permissionBuilder_ == null) {
          if (value == null) {
@@ -4912,6 +5913,9 @@ public final class AccessControlProtos {
        }
        return this;
      }
+      /**
+       * <code>repeated .UserPermission permission = 1;</code>
+       */
      public Builder addPermission(
          int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission value) {
        if (permissionBuilder_ == null) {
@@ -4926,6 +5930,9 @@ public final class AccessControlProtos {
        }
        return this;
      }
+      /**
+       * <code>repeated .UserPermission permission = 1;</code>
+       */
      public Builder addPermission(
          org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder builderForValue) {
        if (permissionBuilder_ == null) {
@@ -4937,6 +5944,9 @@ public final class AccessControlProtos {
        }
        return this;
      }
+      /**
+       * <code>repeated .UserPermission permission = 1;</code>
+       */
      public Builder addPermission(
          int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder builderForValue) {
        if (permissionBuilder_ == null) {
@@ -4948,6 +5958,9 @@ public final class AccessControlProtos {
        }
        return this;
      }
+      /**
+       * <code>repeated .UserPermission permission = 1;</code>
+       */
      public Builder addAllPermission(
          java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission> values) {
        if (permissionBuilder_ == null) {
@@ -4959,6 +5972,9 @@ public final class AccessControlProtos {
        }
        return this;
      }
+      /**
+       * <code>repeated .UserPermission permission = 1;</code>
+       */
      public Builder clearPermission() {
        if (permissionBuilder_ == null) {
          permission_ = java.util.Collections.emptyList();
@@ -4969,6 +5985,9 @@ public final class AccessControlProtos {
        }
        return this;
      }
+      /**
+       * <code>repeated .UserPermission permission = 1;</code>
+       */
      public Builder removePermission(int index) {
        if (permissionBuilder_ == null) {
          ensurePermissionIsMutable();
@@ -4979,10 +5998,16 @@ public final class AccessControlProtos {
        }
        return this;
      }
+      /**
+       * <code>repeated .UserPermission permission = 1;</code>
+       */
      public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder getPermissionBuilder(
          int index) {
        return getPermissionFieldBuilder().getBuilder(index);
      }
+      /**
+       * <code>repeated .UserPermission permission = 1;</code>
+       */
      public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder getPermissionOrBuilder(
          int index) {
        if (permissionBuilder_ == null) {
@@ -4990,6 +6015,9 @@ public final class AccessControlProtos {
          return permissionBuilder_.getMessageOrBuilder(index);
        }
      }
+      /**
+       * <code>repeated .UserPermission permission = 1;</code>
+       */
      public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder> 
           getPermissionOrBuilderList() {
        if (permissionBuilder_ != null) {
@@ -4998,15 +6026,24 @@ public final class AccessControlProtos {
          return java.util.Collections.unmodifiableList(permission_);
        }
      }
+      /**
+       * <code>repeated .UserPermission permission = 1;</code>
+       */
      public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder addPermissionBuilder() {
        return getPermissionFieldBuilder().addBuilder(
            org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance());
      }
+      /**
+       * <code>repeated .UserPermission permission = 1;</code>
+       */
      public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder addPermissionBuilder(
          int index) {
        return getPermissionFieldBuilder().addBuilder(
            index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance());
      }
+      /**
+       * <code>repeated .UserPermission permission = 1;</code>
+       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder> 
           getPermissionBuilderList() {
        return getPermissionFieldBuilder().getBuilderList();
@@ -5025,80 +6062,183 @@ public final class AccessControlProtos {
        }
        return permissionBuilder_;
      }
-      
+
      // @@protoc_insertion_point(builder_scope:UserPermissionsResponse)
    }
-    
+
    static {
      defaultInstance = new UserPermissionsResponse(true);
      defaultInstance.initFields();
    }
-    
+
    // @@protoc_insertion_point(class_scope:UserPermissionsResponse)
  }
-  
+
  public interface CheckPermissionsRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
-    
+
    // repeated .Permission permission = 1;
+    /**
+     * <code>repeated .Permission permission = 1;</code>
+     */
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission> 
        getPermissionList();
+    /**
+     * <code>repeated .Permission permission = 1;</code>
+     */
    org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission getPermission(int index);
+    /**
+     * <code>repeated .Permission permission = 1;</code>
+     */
    int getPermissionCount();
+    /**
+     * <code>repeated .Permission permission = 1;</code>
+     */
    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder> 
        getPermissionOrBuilderList();
+    /**
+     * <code>repeated .Permission permission = 1;</code>
+     */
    org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder getPermissionOrBuilder(
        int index);
  }
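[Editor's note] For UserPermissionsResponse the regenerated constructor parses each repeated .UserPermission element with input.readMessage(UserPermission.PARSER, extensionRegistry) rather than a per-element sub-builder, collecting elements into an ArrayList that is made unmodifiable in the finally block. A small builder-side sketch of the repeated-field API shown above; the isInitialized() loop suggests UserPermission carries required fields, so getDefaultInstance() is only a placeholder and buildPartial() is used to skip the initialization check:

    org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse resp =
        org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse
            .newBuilder()
            .addPermission(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos
                .UserPermission.getDefaultInstance())  // placeholder element only
            .buildPartial();  // build() would reject uninitialized required fields
    int count = resp.getPermissionCount();  // 1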
- private CheckPermissionsRequest(Builder builder) { + private CheckPermissionsRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private CheckPermissionsRequest(boolean noInit) {} - + private CheckPermissionsRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final CheckPermissionsRequest defaultInstance; public static CheckPermissionsRequest getDefaultInstance() { return defaultInstance; } - + public CheckPermissionsRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private CheckPermissionsRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + permission_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + permission_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.PARSER, extensionRegistry)); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + permission_ = java.util.Collections.unmodifiableList(permission_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_CheckPermissionsRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_CheckPermissionsRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_CheckPermissionsRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest.class, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public CheckPermissionsRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CheckPermissionsRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return 
PARSER; } - + // repeated .Permission permission = 1; public static final int PERMISSION_FIELD_NUMBER = 1; private java.util.List permission_; + /** + * repeated .Permission permission = 1; + */ public java.util.List getPermissionList() { return permission_; } + /** + * repeated .Permission permission = 1; + */ public java.util.List getPermissionOrBuilderList() { return permission_; } + /** + * repeated .Permission permission = 1; + */ public int getPermissionCount() { return permission_.size(); } + /** + * repeated .Permission permission = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission getPermission(int index) { return permission_.get(index); } + /** + * repeated .Permission permission = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder getPermissionOrBuilder( int index) { return permission_.get(index); } - + private void initFields() { permission_ = java.util.Collections.emptyList(); } @@ -5106,11 +6246,11 @@ public final class AccessControlProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -5119,12 +6259,12 @@ public final class AccessControlProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; for (int i = 0; i < permission_.size(); i++) { size += com.google.protobuf.CodedOutputStream @@ -5134,14 +6274,14 @@ public final class AccessControlProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -5151,7 +6291,7 @@ public final class AccessControlProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest) obj; - + boolean result = true; result = result && getPermissionList() .equals(other.getPermissionList()); @@ -5159,9 +6299,13 @@ public final class AccessControlProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (getPermissionCount() > 0) { @@ -5169,89 +6313,79 @@ public final class AccessControlProtos { hash = (53 * hash) + getPermissionList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest parseFrom( com.google.protobuf.ByteString data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type 
{@code CheckPermissionsRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequestOrBuilder { @@ -5259,18 +6393,21 @@ public final class AccessControlProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_CheckPermissionsRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_CheckPermissionsRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_CheckPermissionsRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest.class, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -5282,7 +6419,7 @@ public final class AccessControlProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (permissionBuilder_ == null) { @@ -5293,20 +6430,20 @@ public final class AccessControlProtos { } return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_CheckPermissionsRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest build() { org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest result = buildPartial(); if (!result.isInitialized()) { @@ -5314,17 +6451,7 @@ public final class AccessControlProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest(this); int from_bitField0_ = bitField0_; @@ -5340,7 +6467,7 @@ public final class AccessControlProtos { onBuilt(); return result; } - + public Builder 
mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest)other); @@ -5349,7 +6476,7 @@ public final class AccessControlProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest.getDefaultInstance()) return this; if (permissionBuilder_ == null) { @@ -5381,46 +6508,30 @@ public final class AccessControlProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addPermission(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // repeated .Permission permission = 1; private java.util.List permission_ = java.util.Collections.emptyList(); @@ -5430,10 +6541,13 @@ public final class AccessControlProtos { bitField0_ |= 0x00000001; } } - + private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder> permissionBuilder_; - + + /** + * repeated .Permission permission = 1; + */ public java.util.List getPermissionList() { if (permissionBuilder_ == null) { return java.util.Collections.unmodifiableList(permission_); @@ -5441,6 +6555,9 @@ public final class AccessControlProtos { return permissionBuilder_.getMessageList(); } } + /** + * repeated .Permission permission = 1; + */ public int getPermissionCount() { if (permissionBuilder_ == null) { return permission_.size(); @@ -5448,6 +6565,9 @@ public final class AccessControlProtos { return permissionBuilder_.getCount(); } } + /** + * repeated .Permission permission = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission getPermission(int index) { if 
(permissionBuilder_ == null) { return permission_.get(index); @@ -5455,6 +6575,9 @@ public final class AccessControlProtos { return permissionBuilder_.getMessage(index); } } + /** + * repeated .Permission permission = 1; + */ public Builder setPermission( int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission value) { if (permissionBuilder_ == null) { @@ -5469,6 +6592,9 @@ public final class AccessControlProtos { } return this; } + /** + * repeated .Permission permission = 1; + */ public Builder setPermission( int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder builderForValue) { if (permissionBuilder_ == null) { @@ -5480,6 +6606,9 @@ public final class AccessControlProtos { } return this; } + /** + * repeated .Permission permission = 1; + */ public Builder addPermission(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission value) { if (permissionBuilder_ == null) { if (value == null) { @@ -5493,6 +6622,9 @@ public final class AccessControlProtos { } return this; } + /** + * repeated .Permission permission = 1; + */ public Builder addPermission( int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission value) { if (permissionBuilder_ == null) { @@ -5507,6 +6639,9 @@ public final class AccessControlProtos { } return this; } + /** + * repeated .Permission permission = 1; + */ public Builder addPermission( org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder builderForValue) { if (permissionBuilder_ == null) { @@ -5518,6 +6653,9 @@ public final class AccessControlProtos { } return this; } + /** + * repeated .Permission permission = 1; + */ public Builder addPermission( int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder builderForValue) { if (permissionBuilder_ == null) { @@ -5529,6 +6667,9 @@ public final class AccessControlProtos { } return this; } + /** + * repeated .Permission permission = 1; + */ public Builder addAllPermission( java.lang.Iterable values) { if (permissionBuilder_ == null) { @@ -5540,6 +6681,9 @@ public final class AccessControlProtos { } return this; } + /** + * repeated .Permission permission = 1; + */ public Builder clearPermission() { if (permissionBuilder_ == null) { permission_ = java.util.Collections.emptyList(); @@ -5550,6 +6694,9 @@ public final class AccessControlProtos { } return this; } + /** + * repeated .Permission permission = 1; + */ public Builder removePermission(int index) { if (permissionBuilder_ == null) { ensurePermissionIsMutable(); @@ -5560,10 +6707,16 @@ public final class AccessControlProtos { } return this; } + /** + * repeated .Permission permission = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder getPermissionBuilder( int index) { return getPermissionFieldBuilder().getBuilder(index); } + /** + * repeated .Permission permission = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder getPermissionOrBuilder( int index) { if (permissionBuilder_ == null) { @@ -5571,6 +6724,9 @@ public final class AccessControlProtos { return permissionBuilder_.getMessageOrBuilder(index); } } + /** + * repeated .Permission permission = 1; + */ public java.util.List getPermissionOrBuilderList() { if (permissionBuilder_ != null) { @@ -5579,15 +6735,24 @@ public final class AccessControlProtos { return java.util.Collections.unmodifiableList(permission_); } } + /** + * repeated .Permission 
permission = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder addPermissionBuilder() { return getPermissionFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.getDefaultInstance()); } + /** + * repeated .Permission permission = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder addPermissionBuilder( int index) { return getPermissionFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.getDefaultInstance()); } + /** + * repeated .Permission permission = 1; + */ public java.util.List getPermissionBuilderList() { return getPermissionFieldBuilder().getBuilderList(); @@ -5606,84 +6771,145 @@ public final class AccessControlProtos { } return permissionBuilder_; } - + // @@protoc_insertion_point(builder_scope:CheckPermissionsRequest) } - + static { defaultInstance = new CheckPermissionsRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:CheckPermissionsRequest) } - + public interface CheckPermissionsResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code CheckPermissionsResponse} + */ public static final class CheckPermissionsResponse extends com.google.protobuf.GeneratedMessage implements CheckPermissionsResponseOrBuilder { // Use CheckPermissionsResponse.newBuilder() to construct. - private CheckPermissionsResponse(Builder builder) { + private CheckPermissionsResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private CheckPermissionsResponse(boolean noInit) {} - + private CheckPermissionsResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final CheckPermissionsResponse defaultInstance; public static CheckPermissionsResponse getDefaultInstance() { return defaultInstance; } - + public CheckPermissionsResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private CheckPermissionsResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_CheckPermissionsResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable 
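/* CheckPermissionsResponse now parses itself eagerly in the private
 * (CodedInputStream, ExtensionRegistryLite) constructor above: a read-tag
 * loop with a done flag, unknown fields collected into an UnknownFieldSet
 * attached in the finally block, and parse errors tagged with the partially
 * built message via setUnfinishedMessage(this). One observable consequence:
 * unknown fields survive a round trip. Sketch, with {@code data} hypothetical:
 *
 *   CheckPermissionsResponse r = CheckPermissionsResponse.parseFrom(data);
 *   byte[] echoed = r.toByteArray();  // re-serializes fields this client
 *                                     // did not recognize, too
 */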
internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_CheckPermissionsResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_CheckPermissionsResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.class, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public CheckPermissionsResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CheckPermissionsResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -5693,101 +6919,95 @@ public final class AccessControlProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse) obj; - + boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse parseFrom(byte[] data) throws 
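/* The new static PARSER (an AbstractParser whose parsePartialFrom simply
 * invokes the parsing constructor) is also exposed through getParserForType(),
 * which enables type-generic parsing code. A sketch:
 *
 *   static <T extends com.google.protobuf.Message> T roundTrip(T msg)
 *       throws com.google.protobuf.InvalidProtocolBufferException {
 *     @SuppressWarnings("unchecked")
 *     T copy = (T) msg.getParserForType().parseFrom(msg.toByteArray());
 *     return copy;
 *   }
 *
 * Note that the raw Parser type in the text here appears to have lost its
 * type argument to formatting; the generated source reads
 * Parser<CheckPermissionsResponse> and AbstractParser<CheckPermissionsResponse>.
 */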
com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code CheckPermissionsResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponseOrBuilder { @@ -5795,18 +7015,21 @@ public final class AccessControlProtos { getDescriptor() { return 
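/* All ten static parseFrom/parseDelimitedFrom overloads above collapse into
 * one-line delegations to PARSER. Behavior is preserved: parseDelimitedFrom
 * still returns null on clean end-of-stream (AbstractParser checks for EOF
 * before reading a length prefix), matching the old "mergeDelimitedFrom
 * returned false" path, so the usual framing loop keeps working. Sketch,
 * with {@code in} a hypothetical InputStream:
 *
 *   CheckPermissionsResponse m;
 *   while ((m = CheckPermissionsResponse.parseDelimitedFrom(in)) != null) {
 *     // handle one length-prefixed message
 *   }
 */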
org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_CheckPermissionsResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_CheckPermissionsResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_CheckPermissionsResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.class, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -5817,25 +7040,25 @@ public final class AccessControlProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_CheckPermissionsResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse build() { org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse result = buildPartial(); if (!result.isInitialized()) { @@ -5843,23 +7066,13 @@ public final class AccessControlProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse(this); onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse)other); @@ -5868,83 +7081,88 @@ public final class AccessControlProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse other) { if (other == 
org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - - + // @@protoc_insertion_point(builder_scope:CheckPermissionsResponse) } - + static { defaultInstance = new CheckPermissionsResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:CheckPermissionsResponse) } - + + /** + * Protobuf service {@code AccessControlService} + */ public static abstract class AccessControlService implements com.google.protobuf.Service { protected AccessControlService() {} - + public interface Interface { + /** + * rpc grant(.GrantRequest) returns (.GrantResponse); + */ public abstract void grant( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc revoke(.RevokeRequest) returns (.RevokeResponse); + */ public abstract void revoke( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc getUserPermissions(.UserPermissionsRequest) returns (.UserPermissionsResponse); + */ public abstract void getUserPermissions( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc checkPermissions(.CheckPermissionsRequest) returns (.CheckPermissionsResponse); + */ public abstract void checkPermissions( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest request, com.google.protobuf.RpcCallback done); - + } - + public static com.google.protobuf.Service newReflectiveService( final Interface impl) { return new AccessControlService() { @@ -5955,7 +7173,7 @@ public final class AccessControlProtos { com.google.protobuf.RpcCallback done) { impl.grant(controller, request, done); } - + @java.lang.Override public void revoke( com.google.protobuf.RpcController controller, @@ -5963,7 +7181,7 @@ public final class AccessControlProtos { com.google.protobuf.RpcCallback done) { impl.revoke(controller, request, done); } - + 
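/* The service surface is unchanged by the regeneration; 2.5 only adds a
 * javadoc line per rpc. Wiring an implementation still looks like this,
 * where {@code handler} is a hypothetical implementation of
 * AccessControlService.Interface:
 *
 *   com.google.protobuf.Service svc =
 *       AccessControlService.newReflectiveService(handler);
 *   // svc.callMethod(...) dispatches to handler.grant / revoke /
 *   // getUserPermissions / checkPermissions by method descriptor index.
 */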
@java.lang.Override public void getUserPermissions( com.google.protobuf.RpcController controller, @@ -5971,7 +7189,7 @@ public final class AccessControlProtos { com.google.protobuf.RpcCallback done) { impl.getUserPermissions(controller, request, done); } - + @java.lang.Override public void checkPermissions( com.google.protobuf.RpcController controller, @@ -5979,10 +7197,10 @@ public final class AccessControlProtos { com.google.protobuf.RpcCallback done) { impl.checkPermissions(controller, request, done); } - + }; } - + public static com.google.protobuf.BlockingService newReflectiveBlockingService(final BlockingInterface impl) { return new com.google.protobuf.BlockingService() { @@ -5990,7 +7208,7 @@ public final class AccessControlProtos { getDescriptorForType() { return getDescriptor(); } - + public final com.google.protobuf.Message callBlockingMethod( com.google.protobuf.Descriptors.MethodDescriptor method, com.google.protobuf.RpcController controller, @@ -6014,7 +7232,7 @@ public final class AccessControlProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getRequestPrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -6036,7 +7254,7 @@ public final class AccessControlProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getResponsePrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -6058,30 +7276,42 @@ public final class AccessControlProtos { throw new java.lang.AssertionError("Can't get here."); } } - + }; } - + + /** + * rpc grant(.GrantRequest) returns (.GrantResponse); + */ public abstract void grant( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc revoke(.RevokeRequest) returns (.RevokeResponse); + */ public abstract void revoke( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc getUserPermissions(.UserPermissionsRequest) returns (.UserPermissionsResponse); + */ public abstract void getUserPermissions( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc checkPermissions(.CheckPermissionsRequest) returns (.CheckPermissionsResponse); + */ public abstract void checkPermissions( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest request, com.google.protobuf.RpcCallback done); - + public static final com.google.protobuf.Descriptors.ServiceDescriptor getDescriptor() { @@ -6091,7 +7321,7 @@ public final class AccessControlProtos { getDescriptorForType() { return getDescriptor(); } - + public final void callMethod( com.google.protobuf.Descriptors.MethodDescriptor method, com.google.protobuf.RpcController controller, @@ -6128,7 +7358,7 @@ public final class AccessControlProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getRequestPrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -6150,7 +7380,7 @@ public final class AccessControlProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message 
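/* newReflectiveBlockingService adapts a BlockingInterface into a
 * BlockingService whose callBlockingMethod switches on the method descriptor
 * index, exactly as before the regeneration. A dispatch sketch
 * ({@code impl}, {@code controller}, {@code request} are hypothetical;
 * callBlockingMethod throws com.google.protobuf.ServiceException):
 *
 *   com.google.protobuf.BlockingService service =
 *       AccessControlService.newReflectiveBlockingService(impl);
 *   com.google.protobuf.Descriptors.MethodDescriptor grant =
 *       service.getDescriptorForType().getMethods().get(0);
 *   com.google.protobuf.Message reply =
 *       service.callBlockingMethod(grant, controller, request);
 */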
getResponsePrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -6172,23 +7402,23 @@ public final class AccessControlProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public static Stub newStub( com.google.protobuf.RpcChannel channel) { return new Stub(channel); } - + public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.AccessControlService implements Interface { private Stub(com.google.protobuf.RpcChannel channel) { this.channel = channel; } - + private final com.google.protobuf.RpcChannel channel; - + public com.google.protobuf.RpcChannel getChannel() { return channel; } - + public void grant( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest request, @@ -6203,7 +7433,7 @@ public final class AccessControlProtos { org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse.class, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse.getDefaultInstance())); } - + public void revoke( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest request, @@ -6218,7 +7448,7 @@ public final class AccessControlProtos { org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.class, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.getDefaultInstance())); } - + public void getUserPermissions( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest request, @@ -6233,7 +7463,7 @@ public final class AccessControlProtos { org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.class, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.getDefaultInstance())); } - + public void checkPermissions( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest request, @@ -6249,41 +7479,41 @@ public final class AccessControlProtos { org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.getDefaultInstance())); } } - + public static BlockingInterface newBlockingStub( com.google.protobuf.BlockingRpcChannel channel) { return new BlockingStub(channel); } - + public interface BlockingInterface { public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse grant( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse revoke( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse getUserPermissions( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse checkPermissions( com.google.protobuf.RpcController controller, 
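/* The async Stub still funnels every call through channel.callMethod with the
 * response type's default instance as the prototype. Client-side usage is
 * unchanged; a sketch ({@code channel}, {@code controller}, {@code request}
 * are hypothetical):
 *
 *   AccessControlService.Stub stub = AccessControlService.newStub(channel);
 *   stub.checkPermissions(controller, request,
 *       new com.google.protobuf.RpcCallback<CheckPermissionsResponse>() {
 *         public void run(CheckPermissionsResponse response) {
 *           // depending on the channel, response may be null on rpc failure;
 *           // consult the controller for the error
 *         }
 *       });
 */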
org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest request) throws com.google.protobuf.ServiceException; } - + private static final class BlockingStub implements BlockingInterface { private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { this.channel = channel; } - + private final com.google.protobuf.BlockingRpcChannel channel; - + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse grant( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest request) @@ -6294,8 +7524,8 @@ public final class AccessControlProtos { request, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse revoke( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest request) @@ -6306,8 +7536,8 @@ public final class AccessControlProtos { request, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse getUserPermissions( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest request) @@ -6318,8 +7548,8 @@ public final class AccessControlProtos { request, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse checkPermissions( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest request) @@ -6330,10 +7560,12 @@ public final class AccessControlProtos { request, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.getDefaultInstance()); } - + } + + // @@protoc_insertion_point(class_scope:AccessControlService) } - + private static com.google.protobuf.Descriptors.Descriptor internal_static_Permission_descriptor; private static @@ -6394,7 +7626,7 @@ public final class AccessControlProtos { private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_CheckPermissionsResponse_fieldAccessorTable; - + public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; @@ -6441,97 +7673,73 @@ public final class AccessControlProtos { internal_static_Permission_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_Permission_descriptor, - new java.lang.String[] { "Action", "Table", "Family", "Qualifier", }, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.class, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder.class); + new java.lang.String[] { "Action", "Table", "Family", "Qualifier", }); internal_static_UserPermission_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_UserPermission_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_UserPermission_descriptor, - new java.lang.String[] { "User", "Permission", }, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.class, - 
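/* The BlockingStub counterpart issues the same four calls synchronously via
 * channel.callBlockingMethod. Sketch ({@code blockingChannel},
 * {@code controller}, {@code grantRequest} hypothetical):
 *
 *   AccessControlService.BlockingInterface svc =
 *       AccessControlService.newBlockingStub(blockingChannel);
 *   GrantResponse resp = svc.grant(controller, grantRequest);
 */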
org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder.class); + new java.lang.String[] { "User", "Permission", }); internal_static_UserTablePermissions_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_UserTablePermissions_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_UserTablePermissions_descriptor, - new java.lang.String[] { "Permissions", }, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.class, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.Builder.class); + new java.lang.String[] { "Permissions", }); internal_static_UserTablePermissions_UserPermissions_descriptor = internal_static_UserTablePermissions_descriptor.getNestedTypes().get(0); internal_static_UserTablePermissions_UserPermissions_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_UserTablePermissions_UserPermissions_descriptor, - new java.lang.String[] { "User", "Permissions", }, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.class, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.Builder.class); + new java.lang.String[] { "User", "Permissions", }); internal_static_GrantRequest_descriptor = getDescriptor().getMessageTypes().get(3); internal_static_GrantRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_GrantRequest_descriptor, - new java.lang.String[] { "Permission", }, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest.class, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest.Builder.class); + new java.lang.String[] { "Permission", }); internal_static_GrantResponse_descriptor = getDescriptor().getMessageTypes().get(4); internal_static_GrantResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_GrantResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse.Builder.class); + new java.lang.String[] { }); internal_static_RevokeRequest_descriptor = getDescriptor().getMessageTypes().get(5); internal_static_RevokeRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_RevokeRequest_descriptor, - new java.lang.String[] { "Permission", }, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest.class, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest.Builder.class); + new java.lang.String[] { "Permission", }); internal_static_RevokeResponse_descriptor = getDescriptor().getMessageTypes().get(6); internal_static_RevokeResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_RevokeResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.Builder.class); + new java.lang.String[] { }); internal_static_UserPermissionsRequest_descriptor = getDescriptor().getMessageTypes().get(7); internal_static_UserPermissionsRequest_fieldAccessorTable = new 
com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_UserPermissionsRequest_descriptor, - new java.lang.String[] { "Table", }, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest.class, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest.Builder.class); + new java.lang.String[] { "Table", }); internal_static_UserPermissionsResponse_descriptor = getDescriptor().getMessageTypes().get(8); internal_static_UserPermissionsResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_UserPermissionsResponse_descriptor, - new java.lang.String[] { "Permission", }, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.Builder.class); + new java.lang.String[] { "Permission", }); internal_static_CheckPermissionsRequest_descriptor = getDescriptor().getMessageTypes().get(9); internal_static_CheckPermissionsRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_CheckPermissionsRequest_descriptor, - new java.lang.String[] { "Permission", }, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest.class, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest.Builder.class); + new java.lang.String[] { "Permission", }); internal_static_CheckPermissionsResponse_descriptor = getDescriptor().getMessageTypes().get(10); internal_static_CheckPermissionsResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_CheckPermissionsResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.Builder.class); + new java.lang.String[] { }); return null; } }; @@ -6540,6 +7748,6 @@ public final class AccessControlProtos { new com.google.protobuf.Descriptors.FileDescriptor[] { }, assigner); } - + // @@protoc_insertion_point(outer_class_scope) } diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AdminProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AdminProtos.java index a23c498..9c86fac 100644 --- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AdminProtos.java +++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AdminProtos.java @@ -10,68 +10,178 @@ public final class AdminProtos { } public interface GetRegionInfoRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .RegionSpecifier region = 1; + /** + * required .RegionSpecifier region = 1; + */ boolean hasRegion(); + /** + * required .RegionSpecifier region = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + /** + * required .RegionSpecifier region = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - + // optional bool compactionState = 2; + /** + * optional bool compactionState = 2; + */ boolean hasCompactionState(); + /** + * optional bool compactionState = 2; + */ boolean getCompactionState(); } + /** + * Protobuf type {@code GetRegionInfoRequest} + */ public static final class GetRegionInfoRequest extends 
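/* Note the shape change in every FieldAccessorTable above: the 2.4
 * constructor took (descriptor, fieldNames, messageClass, builderClass) and
 * loaded each message class eagerly at descriptor-assignment time; the 2.5
 * constructor takes only (descriptor, fieldNames), and class binding happens
 * lazily in each internalGetFieldAccessorTable(). Paraphrasing the hunks:
 *
 *   // at descriptor assignment:
 *   table = new FieldAccessorTable(descriptor, new String[] { "Permission" });
 *   // later, on first accessor use:
 *   table.ensureFieldAccessorsInitialized(
 *       CheckPermissionsRequest.class, CheckPermissionsRequest.Builder.class);
 *
 * Same data, deferred class loading.
 */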
com.google.protobuf.GeneratedMessage implements GetRegionInfoRequestOrBuilder { // Use GetRegionInfoRequest.newBuilder() to construct. - private GetRegionInfoRequest(Builder builder) { + private GetRegionInfoRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private GetRegionInfoRequest(boolean noInit) {} - + private GetRegionInfoRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final GetRegionInfoRequest defaultInstance; public static GetRegionInfoRequest getDefaultInstance() { return defaultInstance; } - + public GetRegionInfoRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private GetRegionInfoRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = region_.toBuilder(); + } + region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(region_); + region_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + case 16: { + bitField0_ |= 0x00000002; + compactionState_ = input.readBool(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public GetRegionInfoRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
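/* In the GetRegionInfoRequest parsing constructor above, a second occurrence
 * of field 1 on the wire is merged into the first (subBuilder =
 * region_.toBuilder(); read; buildPartial), preserving protobuf's rule that
 * concatenating two serialized messages parses as their merge. A sketch,
 * where {@code a} and {@code b} are hypothetical serialized
 * GetRegionInfoRequest byte arrays:
 *
 *   byte[] joined = new byte[a.length + b.length];
 *   System.arraycopy(a, 0, joined, 0, a.length);
 *   System.arraycopy(b, 0, joined, a.length, b.length);
 *   GetRegionInfoRequest merged = GetRegionInfoRequest.parseFrom(joined);
 *   // merged.getRegion() carries b's region fields merged over a's
 */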
com.google.protobuf.InvalidProtocolBufferException { + return new GetRegionInfoRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required .RegionSpecifier region = 1; public static final int REGION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + /** + * required .RegionSpecifier region = 1; + */ public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { return region_; } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { return region_; } - + // optional bool compactionState = 2; public static final int COMPACTIONSTATE_FIELD_NUMBER = 2; private boolean compactionState_; + /** + * optional bool compactionState = 2; + */ public boolean hasCompactionState() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional bool compactionState = 2; + */ public boolean getCompactionState() { return compactionState_; } - + private void initFields() { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); compactionState_ = false; @@ -80,7 +190,7 @@ public final class AdminProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasRegion()) { memoizedIsInitialized = 0; return false; @@ -92,7 +202,7 @@ public final class AdminProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -104,12 +214,12 @@ public final class AdminProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -123,14 +233,14 @@ public final class AdminProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -140,7 +250,7 @@ public final class AdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest) obj; - + boolean result = true; result = result && (hasRegion() == other.hasRegion()); if (hasRegion()) { @@ -156,9 +266,13 @@ public final class AdminProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasRegion()) { @@ -170,89 +284,79 @@ public final class AdminProtos { hash = (53 * hash) + hashBoolean(getCompactionState()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; 
return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder 
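/* hashCode() is now memoized, which is safe because messages are immutable:
 * the first call stores the result in memoizedHashCode and later calls return
 * it directly. This matters when messages serve as map or set keys. Sketch,
 * with {@code request} hypothetical:
 *
 *   java.util.Set<GetRegionInfoRequest> seen =
 *       new java.util.HashSet<GetRegionInfoRequest>();
 *   seen.add(request);       // computes and caches the hash
 *   seen.contains(request);  // reuses memoizedHashCode
 */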
newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code GetRegionInfoRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequestOrBuilder { @@ -260,18 +364,21 @@ public final class AdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -283,7 +390,7 @@ public final class AdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (regionBuilder_ == null) { @@ -296,20 +403,20 @@ public final class AdminProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest build() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest result = buildPartial(); if (!result.isInitialized()) { @@ -317,17 +424,7 @@ public final class AdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest result = new 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest(this); int from_bitField0_ = bitField0_; @@ -348,7 +445,7 @@ public final class AdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest)other); @@ -357,7 +454,7 @@ public final class AdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.getDefaultInstance()) return this; if (other.hasRegion()) { @@ -369,7 +466,7 @@ public final class AdminProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasRegion()) { @@ -381,57 +478,39 @@ public final class AdminProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegion(subBuilder.buildPartial()); - break; - } - case 16: { - bitField0_ |= 0x00000002; - compactionState_ = input.readBool(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .RegionSpecifier region = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + /** + * required .RegionSpecifier region = 1; + */ public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { return region_; @@ -439,6 +518,9 @@ public final class AdminProtos { return 
regionBuilder_.getMessage(); } } + /** + * required .RegionSpecifier region = 1; + */ public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (value == null) { @@ -452,6 +534,9 @@ public final class AdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier region = 1; + */ public Builder setRegion( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { if (regionBuilder_ == null) { @@ -463,6 +548,9 @@ public final class AdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier region = 1; + */ public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -479,6 +567,9 @@ public final class AdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier region = 1; + */ public Builder clearRegion() { if (regionBuilder_ == null) { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); @@ -489,11 +580,17 @@ public final class AdminProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { bitField0_ |= 0x00000001; onChanged(); return getRegionFieldBuilder().getBuilder(); } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); @@ -501,6 +598,9 @@ public final class AdminProtos { return region_; } } + /** + * required .RegionSpecifier region = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { @@ -514,95 +614,235 @@ public final class AdminProtos { } return regionBuilder_; } - + // optional bool compactionState = 2; private boolean compactionState_ ; + /** + * optional bool compactionState = 2; + */ public boolean hasCompactionState() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional bool compactionState = 2; + */ public boolean getCompactionState() { return compactionState_; } + /** + * optional bool compactionState = 2; + */ public Builder setCompactionState(boolean value) { bitField0_ |= 0x00000002; compactionState_ = value; onChanged(); return this; } + /** + * optional bool compactionState = 2; + */ public Builder clearCompactionState() { bitField0_ = (bitField0_ & ~0x00000002); compactionState_ = false; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:GetRegionInfoRequest) } - + static { defaultInstance = new GetRegionInfoRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:GetRegionInfoRequest) } - + public interface GetRegionInfoResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .RegionInfo regionInfo = 1; + /** + * required .RegionInfo regionInfo = 1; + */ boolean hasRegionInfo(); + /** + * required .RegionInfo regionInfo = 1; + */ 
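/* The builder surface for the singular message field and the bool field is
 * unchanged; 2.5 only adds per-accessor javadoc. Typical construction, where
 * {@code spec} is a hypothetical RegionSpecifier built elsewhere:
 *
 *   GetRegionInfoRequest req = GetRegionInfoRequest.newBuilder()
 *       .setRegion(spec)
 *       .setCompactionState(true)
 *       .build();             // throws if the required region is unset
 *   boolean asked = req.hasCompactionState();
 */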
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(); + /** + * required .RegionInfo regionInfo = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder(); - + // optional .GetRegionInfoResponse.CompactionState compactionState = 2; + /** + * optional .GetRegionInfoResponse.CompactionState compactionState = 2; + */ boolean hasCompactionState(); + /** + * optional .GetRegionInfoResponse.CompactionState compactionState = 2; + */ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState getCompactionState(); } + /** + * Protobuf type {@code GetRegionInfoResponse} + */ public static final class GetRegionInfoResponse extends com.google.protobuf.GeneratedMessage implements GetRegionInfoResponseOrBuilder { // Use GetRegionInfoResponse.newBuilder() to construct. - private GetRegionInfoResponse(Builder builder) { + private GetRegionInfoResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private GetRegionInfoResponse(boolean noInit) {} - + private GetRegionInfoResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final GetRegionInfoResponse defaultInstance; public static GetRegionInfoResponse getDefaultInstance() { return defaultInstance; } - + public GetRegionInfoResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private GetRegionInfoResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = regionInfo_.toBuilder(); + } + regionInfo_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(regionInfo_); + regionInfo_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + case 16: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState value = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(2, rawValue); + } else { + bitField0_ |= 0x00000002; + compactionState_ = value; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + 
makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public GetRegionInfoResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetRegionInfoResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + + /** + * Protobuf enum {@code GetRegionInfoResponse.CompactionState} + */ public enum CompactionState implements com.google.protobuf.ProtocolMessageEnum { + /** + * NONE = 0; + */ NONE(0, 0), + /** + * MINOR = 1; + */ MINOR(1, 1), + /** + * MAJOR = 2; + */ MAJOR(2, 2), + /** + * MAJOR_AND_MINOR = 3; + */ MAJOR_AND_MINOR(3, 3), ; - + + /** + * NONE = 0; + */ public static final int NONE_VALUE = 0; + /** + * MINOR = 1; + */ public static final int MINOR_VALUE = 1; + /** + * MAJOR = 2; + */ public static final int MAJOR_VALUE = 2; + /** + * MAJOR_AND_MINOR = 3; + */ public static final int MAJOR_AND_MINOR_VALUE = 3; - - + + public final int getNumber() { return value; } - + public static CompactionState valueOf(int value) { switch (value) { case 0: return NONE; @@ -612,7 +852,7 @@ public final class AdminProtos { default: return null; } } - + public static com.google.protobuf.Internal.EnumLiteMap internalGetValueMap() { return internalValueMap; @@ -624,7 +864,7 @@ public final class AdminProtos { return CompactionState.valueOf(number); } }; - + public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(index); @@ -637,11 +877,9 @@ public final class AdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDescriptor().getEnumTypes().get(0); } - - private static final CompactionState[] VALUES = { - NONE, MINOR, MAJOR, MAJOR_AND_MINOR, - }; - + + private static final CompactionState[] VALUES = values(); + public static CompactionState valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { @@ -650,42 +888,57 @@ public final class AdminProtos { } return VALUES[desc.getIndex()]; } - + private final int index; private final int value; - + private CompactionState(int index, int value) { this.index = index; this.value = value; } - + // @@protoc_insertion_point(enum_scope:GetRegionInfoResponse.CompactionState) } - + private int bitField0_; // required .RegionInfo regionInfo = 1; public static final int REGIONINFO_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo regionInfo_; + /** + * required .RegionInfo 
regionInfo = 1; + */ public boolean hasRegionInfo() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .RegionInfo regionInfo = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo() { return regionInfo_; } + /** + * required .RegionInfo regionInfo = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder() { return regionInfo_; } - + // optional .GetRegionInfoResponse.CompactionState compactionState = 2; public static final int COMPACTIONSTATE_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState compactionState_; + /** + * optional .GetRegionInfoResponse.CompactionState compactionState = 2; + */ public boolean hasCompactionState() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional .GetRegionInfoResponse.CompactionState compactionState = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState getCompactionState() { return compactionState_; } - + private void initFields() { regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); compactionState_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState.NONE; @@ -694,7 +947,7 @@ public final class AdminProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasRegionInfo()) { memoizedIsInitialized = 0; return false; @@ -706,7 +959,7 @@ public final class AdminProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -718,12 +971,12 @@ public final class AdminProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -737,14 +990,14 @@ public final class AdminProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -754,7 +1007,7 @@ public final class AdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse) obj; - + boolean result = true; result = result && (hasRegionInfo() == other.hasRegionInfo()); if (hasRegionInfo()) { @@ -770,9 +1023,13 @@ public final class AdminProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasRegionInfo()) { @@ -784,89 +1041,79 @@ public final class AdminProtos { hash = (53 * hash) + hashEnum(getCompactionState()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder 
newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code GetRegionInfoResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponseOrBuilder { @@ -874,18 +1121,21 @@ public final class AdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -897,7 +1147,7 @@ public final class AdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (regionInfoBuilder_ == null) { @@ -910,20 +1160,20 @@ public final class AdminProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse build() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse result = buildPartial(); if (!result.isInitialized()) { @@ -931,17 +1181,7 @@ public final class AdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse result = new 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse(this); int from_bitField0_ = bitField0_; @@ -962,7 +1202,7 @@ public final class AdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse)other); @@ -971,7 +1211,7 @@ public final class AdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDefaultInstance()) return this; if (other.hasRegionInfo()) { @@ -983,7 +1223,7 @@ public final class AdminProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasRegionInfo()) { @@ -995,63 +1235,39 @@ public final class AdminProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.newBuilder(); - if (hasRegionInfo()) { - subBuilder.mergeFrom(getRegionInfo()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegionInfo(subBuilder.buildPartial()); - break; - } - case 16: { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState value = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(2, rawValue); - } else { - bitField0_ |= 0x00000002; - compactionState_ = value; - } - break; - } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .RegionInfo regionInfo = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionInfoBuilder_; + /** + * required .RegionInfo regionInfo = 1; + */ public 
boolean hasRegionInfo() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .RegionInfo regionInfo = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo() { if (regionInfoBuilder_ == null) { return regionInfo_; @@ -1059,6 +1275,9 @@ public final class AdminProtos { return regionInfoBuilder_.getMessage(); } } + /** + * required .RegionInfo regionInfo = 1; + */ public Builder setRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) { if (regionInfoBuilder_ == null) { if (value == null) { @@ -1072,6 +1291,9 @@ public final class AdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionInfo regionInfo = 1; + */ public Builder setRegionInfo( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) { if (regionInfoBuilder_ == null) { @@ -1083,6 +1305,9 @@ public final class AdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionInfo regionInfo = 1; + */ public Builder mergeRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) { if (regionInfoBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -1099,6 +1324,9 @@ public final class AdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionInfo regionInfo = 1; + */ public Builder clearRegionInfo() { if (regionInfoBuilder_ == null) { regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); @@ -1109,11 +1337,17 @@ public final class AdminProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * required .RegionInfo regionInfo = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder getRegionInfoBuilder() { bitField0_ |= 0x00000001; onChanged(); return getRegionInfoFieldBuilder().getBuilder(); } + /** + * required .RegionInfo regionInfo = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder() { if (regionInfoBuilder_ != null) { return regionInfoBuilder_.getMessageOrBuilder(); @@ -1121,6 +1355,9 @@ public final class AdminProtos { return regionInfo_; } } + /** + * required .RegionInfo regionInfo = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> getRegionInfoFieldBuilder() { @@ -1134,15 +1371,24 @@ public final class AdminProtos { } return regionInfoBuilder_; } - + // optional .GetRegionInfoResponse.CompactionState compactionState = 2; private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState compactionState_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState.NONE; + /** + * optional .GetRegionInfoResponse.CompactionState compactionState = 2; + */ public boolean hasCompactionState() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional .GetRegionInfoResponse.CompactionState compactionState = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState getCompactionState() { return compactionState_; } + /** + * optional .GetRegionInfoResponse.CompactionState compactionState = 2; + */ public Builder 
setCompactionState(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState value) { if (value == null) { throw new NullPointerException(); @@ -1152,102 +1398,233 @@ public final class AdminProtos { onChanged(); return this; } + /** + * optional .GetRegionInfoResponse.CompactionState compactionState = 2; + */ public Builder clearCompactionState() { bitField0_ = (bitField0_ & ~0x00000002); compactionState_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState.NONE; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:GetRegionInfoResponse) } - + static { defaultInstance = new GetRegionInfoResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:GetRegionInfoResponse) } - + public interface GetStoreFileRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .RegionSpecifier region = 1; + /** + * required .RegionSpecifier region = 1; + */ boolean hasRegion(); + /** + * required .RegionSpecifier region = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + /** + * required .RegionSpecifier region = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - + // repeated bytes family = 2; + /** + * repeated bytes family = 2; + */ java.util.List getFamilyList(); + /** + * repeated bytes family = 2; + */ int getFamilyCount(); + /** + * repeated bytes family = 2; + */ com.google.protobuf.ByteString getFamily(int index); } + /** + * Protobuf type {@code GetStoreFileRequest} + * + *
+   * &lt;pre&gt;
+   **
+   * Get a list of store files for a set of column families in a particular region.
+   * If no column family is specified, get the store files for all column families.
+   * &lt;/pre&gt;
+ */ public static final class GetStoreFileRequest extends com.google.protobuf.GeneratedMessage implements GetStoreFileRequestOrBuilder { // Use GetStoreFileRequest.newBuilder() to construct. - private GetStoreFileRequest(Builder builder) { + private GetStoreFileRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private GetStoreFileRequest(boolean noInit) {} - + private GetStoreFileRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final GetStoreFileRequest defaultInstance; public static GetStoreFileRequest getDefaultInstance() { return defaultInstance; } - + public GetStoreFileRequest getDefaultInstanceForType() { return defaultInstance; } - - public static final com.google.protobuf.Descriptors.Descriptor + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private GetStoreFileRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = region_.toBuilder(); + } + region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(region_); + region_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + case 18: { + if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + family_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000002; + } + family_.add(input.readBytes()); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + family_ = java.util.Collections.unmodifiableList(family_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.class, 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public GetStoreFileRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetStoreFileRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required .RegionSpecifier region = 1; public static final int REGION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + /** + * required .RegionSpecifier region = 1; + */ public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { return region_; } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { return region_; } - + // repeated bytes family = 2; public static final int FAMILY_FIELD_NUMBER = 2; private java.util.List family_; + /** + * repeated bytes family = 2; + */ public java.util.List getFamilyList() { return family_; } + /** + * repeated bytes family = 2; + */ public int getFamilyCount() { return family_.size(); } + /** + * repeated bytes family = 2; + */ public com.google.protobuf.ByteString getFamily(int index) { return family_.get(index); } - + private void initFields() { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - family_ = java.util.Collections.emptyList();; + family_ = java.util.Collections.emptyList(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasRegion()) { memoizedIsInitialized = 0; return false; @@ -1259,7 +1636,7 @@ public final class AdminProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -1271,12 +1648,12 @@ public final class AdminProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -1295,14 +1672,14 @@ public final class AdminProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -1312,7 +1689,7 @@ public final class AdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest) obj; - + boolean result = true; result = result && (hasRegion() == other.hasRegion()); if (hasRegion()) { @@ -1325,9 +1702,13 @@ public final class AdminProtos { 
getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasRegion()) { @@ -1339,89 +1720,85 @@ public final class AdminProtos { hash = (53 * hash) + getFamilyList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest 
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code GetStoreFileRequest} + * + *
+     * &lt;pre&gt;
+     **
+     * Get a list of store files for a set of column families in a particular region.
+     * If no column family is specified, get the store files for all column families.
+     * &lt;/pre&gt;
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequestOrBuilder { @@ -1429,18 +1806,21 @@ public final class AdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -1452,7 +1832,7 @@ public final class AdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (regionBuilder_ == null) { @@ -1461,24 +1841,24 @@ public final class AdminProtos { regionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); - family_ = java.util.Collections.emptyList();; + family_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest build() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest result = buildPartial(); if (!result.isInitialized()) { @@ -1486,17 +1866,7 @@ public final class AdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest(this); int from_bitField0_ = bitField0_; @@ -1518,7 +1888,7 @@ public final class AdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest)other); @@ -1527,7 +1897,7 @@ public final class AdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.getDefaultInstance()) return this; if (other.hasRegion()) { @@ -1546,7 +1916,7 @@ public final class AdminProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasRegion()) { @@ -1558,57 +1928,39 @@ public final class AdminProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegion(subBuilder.buildPartial()); - break; - } - case 18: { - ensureFamilyIsMutable(); - family_.add(input.readBytes()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .RegionSpecifier region = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + /** + * required .RegionSpecifier region = 1; + */ public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { return region_; @@ -1616,6 +1968,9 @@ public final class AdminProtos { return regionBuilder_.getMessage(); } } + /** + * required .RegionSpecifier region = 1; + */ public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (value == null) { @@ -1629,6 +1984,9 @@ public final class 
AdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier region = 1; + */ public Builder setRegion( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { if (regionBuilder_ == null) { @@ -1640,6 +1998,9 @@ public final class AdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier region = 1; + */ public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -1656,6 +2017,9 @@ public final class AdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier region = 1; + */ public Builder clearRegion() { if (regionBuilder_ == null) { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); @@ -1666,11 +2030,17 @@ public final class AdminProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { bitField0_ |= 0x00000001; onChanged(); return getRegionFieldBuilder().getBuilder(); } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); @@ -1678,6 +2048,9 @@ public final class AdminProtos { return region_; } } + /** + * required .RegionSpecifier region = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { @@ -1691,25 +2064,37 @@ public final class AdminProtos { } return regionBuilder_; } - + // repeated bytes family = 2; - private java.util.List family_ = java.util.Collections.emptyList();; + private java.util.List family_ = java.util.Collections.emptyList(); private void ensureFamilyIsMutable() { if (!((bitField0_ & 0x00000002) == 0x00000002)) { family_ = new java.util.ArrayList(family_); bitField0_ |= 0x00000002; } } + /** + * repeated bytes family = 2; + */ public java.util.List getFamilyList() { return java.util.Collections.unmodifiableList(family_); } + /** + * repeated bytes family = 2; + */ public int getFamilyCount() { return family_.size(); } + /** + * repeated bytes family = 2; + */ public com.google.protobuf.ByteString getFamily(int index) { return family_.get(index); } + /** + * repeated bytes family = 2; + */ public Builder setFamily( int index, com.google.protobuf.ByteString value) { if (value == null) { @@ -1720,6 +2105,9 @@ public final class AdminProtos { onChanged(); return this; } + /** + * repeated bytes family = 2; + */ public Builder addFamily(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -1729,6 +2117,9 @@ public final class AdminProtos { onChanged(); return this; } + /** + * repeated bytes family = 2; + */ public Builder addAllFamily( java.lang.Iterable values) { ensureFamilyIsMutable(); @@ -1736,74 +2127,181 @@ public final class AdminProtos { onChanged(); return this; } + /** + * repeated bytes family = 2; + */ public Builder clearFamily() { - family_ = java.util.Collections.emptyList();; + family_ = 
java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:GetStoreFileRequest) } - + static { defaultInstance = new GetStoreFileRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:GetStoreFileRequest) } - + public interface GetStoreFileResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // repeated string storeFile = 1; - java.util.List getStoreFileList(); + /** + * repeated string storeFile = 1; + */ + java.util.List + getStoreFileList(); + /** + * repeated string storeFile = 1; + */ int getStoreFileCount(); - String getStoreFile(int index); + /** + * repeated string storeFile = 1; + */ + java.lang.String getStoreFile(int index); + /** + * repeated string storeFile = 1; + */ + com.google.protobuf.ByteString + getStoreFileBytes(int index); } + /** + * Protobuf type {@code GetStoreFileResponse} + */ public static final class GetStoreFileResponse extends com.google.protobuf.GeneratedMessage implements GetStoreFileResponseOrBuilder { // Use GetStoreFileResponse.newBuilder() to construct. - private GetStoreFileResponse(Builder builder) { + private GetStoreFileResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private GetStoreFileResponse(boolean noInit) {} - + private GetStoreFileResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final GetStoreFileResponse defaultInstance; public static GetStoreFileResponse getDefaultInstance() { return defaultInstance; } - + public GetStoreFileResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private GetStoreFileResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + storeFile_ = new com.google.protobuf.LazyStringArrayList(); + mutable_bitField0_ |= 0x00000001; + } + storeFile_.add(input.readBytes()); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + storeFile_ = new com.google.protobuf.UnmodifiableLazyStringList(storeFile_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileResponse_descriptor; } - + protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public GetStoreFileResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetStoreFileResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + // repeated string storeFile = 1; public static final int STOREFILE_FIELD_NUMBER = 1; private com.google.protobuf.LazyStringList storeFile_; - public java.util.List + /** + * repeated string storeFile = 1; + */ + public java.util.List getStoreFileList() { return storeFile_; } + /** + * repeated string storeFile = 1; + */ public int getStoreFileCount() { return storeFile_.size(); } - public String getStoreFile(int index) { + /** + * repeated string storeFile = 1; + */ + public java.lang.String getStoreFile(int index) { return storeFile_.get(index); } - + /** + * repeated string storeFile = 1; + */ + public com.google.protobuf.ByteString + getStoreFileBytes(int index) { + return storeFile_.getByteString(index); + } + private void initFields() { storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY; } @@ -1811,11 +2309,11 @@ public final class AdminProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -1824,12 +2322,12 @@ public final class AdminProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; { int dataSize = 0; @@ -1844,14 +2342,14 @@ public final class AdminProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -1861,7 +2359,7 @@ public final class AdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse) obj; - + boolean result = true; result = result && getStoreFileList() .equals(other.getStoreFileList()); @@ -1869,9 +2367,13 @@ public final class AdminProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + 
getDescriptorForType().hashCode(); if (getStoreFileCount() > 0) { @@ -1879,89 +2381,79 @@ public final class AdminProtos { hash = (53 * hash) + getStoreFileList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return 
-    
+
     public static Builder newBuilder() { return Builder.create(); }
     public Builder newBuilderForType() { return newBuilder(); }
     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse prototype) {
       return newBuilder().mergeFrom(prototype);
     }
     public Builder toBuilder() { return newBuilder(this); }
-    
+
     @java.lang.Override
     protected Builder newBuilderForType(
         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
       Builder builder = new Builder(parent);
       return builder;
     }
+    /**
+     * Protobuf type {@code GetStoreFileResponse}
+     */
     public static final class Builder extends
         com.google.protobuf.GeneratedMessage.Builder<Builder>
        implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponseOrBuilder {
@@ -1969,18 +2461,21 @@ public final class AdminProtos {
           getDescriptor() {
         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileResponse_descriptor;
       }
-      
+
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileResponse_fieldAccessorTable;
+        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileResponse_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.Builder.class);
       }
-      
+
       // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.newBuilder()
       private Builder() {
         maybeForceBuilderInitialization();
       }
-      
-      private Builder(BuilderParent parent) {
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
         super(parent);
         maybeForceBuilderInitialization();
       }
@@ -1991,27 +2486,27 @@ public final class AdminProtos {
       private static Builder create() {
         return new Builder();
       }
-      
+
       public Builder clear() {
         super.clear();
         storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY;
         bitField0_ = (bitField0_ & ~0x00000001);
         return this;
       }
-      
+
       public Builder clone() {
         return create().mergeFrom(buildPartial());
       }
-      
+
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.getDescriptor();
+        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileResponse_descriptor;
       }
-      
+
       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse getDefaultInstanceForType() {
         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.getDefaultInstance();
       }
-      
+
       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse build() {
         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse result = buildPartial();
         if (!result.isInitialized()) {
@@ -2019,17 +2514,7 @@ public final class AdminProtos {
         }
         return result;
       }
-      
-      private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse buildParsed()
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(
-            result).asInvalidProtocolBufferException();
-        }
-        return result;
-      }
-      
+
      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse(this);
        int from_bitField0_ = bitField0_;
@@ -2042,7 +2527,7 @@ public final class AdminProtos {
        onBuilt();
        return result;
      }
-      
+
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse)other);
@@ -2051,7 +2536,7 @@ public final class AdminProtos {
          return this;
        }
      }
-      
+
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.getDefaultInstance()) return this;
        if (!other.storeFile_.isEmpty()) {
@@ -2067,45 +2552,30 @@ public final class AdminProtos {
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
-      
+
      public final boolean isInitialized() {
        return true;
      }
-      
+
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
-        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder(
-            this.getUnknownFields());
-        while (true) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              this.setUnknownFields(unknownFields.build());
-              onChanged();
-              return this;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                this.setUnknownFields(unknownFields.build());
-                onChanged();
-                return this;
-              }
-              break;
-            }
-            case 10: {
-              ensureStoreFileIsMutable();
-              storeFile_.add(input.readBytes());
-              break;
-            }
+        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
          }
        }
+        return this;
      }
-      
       private int bitField0_;
-      
+
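The rewritten Builder.mergeFrom above no longer hand-rolls a tag-switch loop; it
delegates to PARSER.parsePartialFrom and, if parsing fails midway, still merges the
partially decoded message (carried on the exception) into the builder before
rethrowing. A hedged sketch of what a caller can rely on, assuming a hypothetical
CodedInputStream `in` over truncated data and an enclosing method that declares
java.io.IOException:

    GetStoreFileResponse.Builder b = GetStoreFileResponse.newBuilder();
    try {
      b.mergeFrom(in, com.google.protobuf.ExtensionRegistryLite.getEmptyRegistry());
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      // b still holds every storeFile entry decoded before the failure,
      // because the finally block above merged e.getUnfinishedMessage().
    }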
      // repeated string storeFile = 1;
      private com.google.protobuf.LazyStringList storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY;
      private void ensureStoreFileIsMutable() {
@@ -2114,18 +2584,37 @@ public final class AdminProtos {
          bitField0_ |= 0x00000001;
        }
      }
-      public java.util.List<String>
+      /**
+       * repeated string storeFile = 1;
+       */
+      public java.util.List<java.lang.String>
          getStoreFileList() {
        return java.util.Collections.unmodifiableList(storeFile_);
      }
+      /**
+       * repeated string storeFile = 1;
+       */
      public int getStoreFileCount() {
        return storeFile_.size();
      }
-      public String getStoreFile(int index) {
+      /**
+       * repeated string storeFile = 1;
+       */
+      public java.lang.String getStoreFile(int index) {
        return storeFile_.get(index);
      }
+      /**
+       * repeated string storeFile = 1;
+       */
+      public com.google.protobuf.ByteString
+          getStoreFileBytes(int index) {
+        return storeFile_.getByteString(index);
+      }
+      /**
+       * repeated string storeFile = 1;
+       */
      public Builder setStoreFile(
-          int index, String value) {
+          int index, java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
@@ -2134,7 +2623,11 @@ public final class AdminProtos {
        onChanged();
        return this;
      }
-      public Builder addStoreFile(String value) {
+      /**
+       * repeated string storeFile = 1;
+       */
+      public Builder addStoreFile(
+          java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
@@ -2143,102 +2636,177 @@ public final class AdminProtos {
        onChanged();
        return this;
      }
+      /**
+       * repeated string storeFile = 1;
+       */
      public Builder addAllStoreFile(
-          java.lang.Iterable<String> values) {
+          java.lang.Iterable<java.lang.String> values) {
        ensureStoreFileIsMutable();
        super.addAll(values, storeFile_);
        onChanged();
        return this;
      }
+      /**
+       * repeated string storeFile = 1;
+       */
      public Builder clearStoreFile() {
        storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
-      void addStoreFile(com.google.protobuf.ByteString value) {
-        ensureStoreFileIsMutable();
+      /**
+       * repeated string storeFile = 1;
+       */
+      public Builder addStoreFileBytes(
+          com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        ensureStoreFileIsMutable();
        storeFile_.add(value);
        onChanged();
+        return this;
      }
-      
+
      // @@protoc_insertion_point(builder_scope:GetStoreFileResponse)
    }
-    
+
    static {
      defaultInstance = new GetStoreFileResponse(true);
      defaultInstance.initFields();
    }
-    
+
    // @@protoc_insertion_point(class_scope:GetStoreFileResponse)
  }
-  
+
  public interface GetOnlineRegionRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
+  /**
+   * Protobuf type {@code GetOnlineRegionRequest}
+   */
  public static final class GetOnlineRegionRequest extends
      com.google.protobuf.GeneratedMessage
      implements GetOnlineRegionRequestOrBuilder {
    // Use GetOnlineRegionRequest.newBuilder() to construct.
-    private GetOnlineRegionRequest(Builder builder) {
+    private GetOnlineRegionRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
+      this.unknownFields = builder.getUnknownFields();
    }
-    private GetOnlineRegionRequest(boolean noInit) {}
-    
+    private GetOnlineRegionRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
    private static final GetOnlineRegionRequest defaultInstance;
    public static GetOnlineRegionRequest getDefaultInstance() {
      return defaultInstance;
    }
-    
+
    public GetOnlineRegionRequest getDefaultInstanceForType() {
      return defaultInstance;
    }
-    
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private GetOnlineRegionRequest(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
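GetOnlineRegionRequest (like every message in this file) now parses itself in a
private constructor and keeps the resulting UnknownFieldSet in a final field, so
fields a newer schema adds survive a round trip through an older binary. A hedged
round-trip sketch (`bytes` is hypothetical wire data carrying a field this empty
message does not define):

    GetOnlineRegionRequest req = GetOnlineRegionRequest.PARSER.parseFrom(bytes);
    byte[] echoed = req.toByteArray();  // unknown fields are re-serialized verbatim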
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionRequest_descriptor;
    }
-    
+
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionRequest_fieldAccessorTable;
+      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionRequest_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<GetOnlineRegionRequest> PARSER =
+        new com.google.protobuf.AbstractParser<GetOnlineRegionRequest>() {
+      public GetOnlineRegionRequest parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new GetOnlineRegionRequest(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<GetOnlineRegionRequest> getParserForType() {
+      return PARSER;
    }
-    
+
    private void initFields() {
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
-    
+
      memoizedIsInitialized = 1;
      return true;
    }
-    
+
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }
-    
+
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
-    
+
      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
-    
+
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
-    
+
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
@@ -2248,101 +2816,95 @@ public final class AdminProtos {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest) obj;
-    
+
      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
-    
+
+    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
      return hash;
    }
-    
+
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
    }
-    
+
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
-    
+
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
+    /**
+     * Protobuf type {@code GetOnlineRegionRequest}
+     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequestOrBuilder {
@@ -2350,18 +2912,21 @@ public final class AdminProtos {
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionRequest_descriptor;
      }
-      
+
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionRequest_fieldAccessorTable;
+        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionRequest_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.Builder.class);
      }
-      
+
      // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
-      
-      private Builder(BuilderParent parent) {
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
@@ -2372,25 +2937,25 @@ public final class AdminProtos {
      private static Builder create() {
        return new Builder();
      }
-      
+
      public Builder clear() {
        super.clear();
        return this;
      }
-      
+
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
-      
+
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.getDescriptor();
+        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionRequest_descriptor;
      }
-      
+
      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.getDefaultInstance();
      }
-      
+
      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest result = buildPartial();
        if (!result.isInitialized()) {
@@ -2398,23 +2963,13 @@ public final class AdminProtos {
        }
        return result;
      }
-      
-      private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest buildParsed()
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(
-            result).asInvalidProtocolBufferException();
-        }
-        return result;
-      }
-      
+
      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest(this);
        onBuilt();
        return result;
      }
-      
+
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest)other);
@@ -2423,118 +2978,211 @@ public final class AdminProtos {
          return this;
        }
      }
-      
+
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
-      
+
      public final boolean isInitialized() {
        return true;
      }
-      
+
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
-        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder(
-            this.getUnknownFields());
-        while (true) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              this.setUnknownFields(unknownFields.build());
-              onChanged();
-              return this;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                this.setUnknownFields(unknownFields.build());
-                onChanged();
-                return this;
-              }
-              break;
-            }
+        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
          }
        }
+        return this;
      }
-      
-      
+
      // @@protoc_insertion_point(builder_scope:GetOnlineRegionRequest)
    }
-    
+
    static {
      defaultInstance = new GetOnlineRegionRequest(true);
      defaultInstance.initFields();
    }
-    
+
    // @@protoc_insertion_point(class_scope:GetOnlineRegionRequest)
  }
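Since GetOnlineRegionRequest carries no fields, one parsed or built instance is
interchangeable with another, and the generated shared instance makes the
allocation-free pattern explicit. Hypothetical usage:

    // Reuse the immutable shared instance instead of building a fresh empty message.
    GetOnlineRegionRequest req = GetOnlineRegionRequest.getDefaultInstance();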
-  
+
  public interface GetOnlineRegionResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
-    
+
    // repeated .RegionInfo regionInfo = 1;
+    /**
+     * repeated .RegionInfo regionInfo = 1;
+     */
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo>
        getRegionInfoList();
+    /**
+     * repeated .RegionInfo regionInfo = 1;
+     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(int index);
+    /**
+     * repeated .RegionInfo regionInfo = 1;
+     */
    int getRegionInfoCount();
+    /**
+     * repeated .RegionInfo regionInfo = 1;
+     */
    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>
        getRegionInfoOrBuilderList();
+    /**
+     * repeated .RegionInfo regionInfo = 1;
+     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder(
        int index);
  }
+  /**
+   * Protobuf type {@code GetOnlineRegionResponse}
+   */
  public static final class GetOnlineRegionResponse extends
      com.google.protobuf.GeneratedMessage
      implements GetOnlineRegionResponseOrBuilder {
    // Use GetOnlineRegionResponse.newBuilder() to construct.
-    private GetOnlineRegionResponse(Builder builder) {
+    private GetOnlineRegionResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
+      this.unknownFields = builder.getUnknownFields();
    }
-    private GetOnlineRegionResponse(boolean noInit) {}
-    
+    private GetOnlineRegionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
    private static final GetOnlineRegionResponse defaultInstance;
    public static GetOnlineRegionResponse getDefaultInstance() {
      return defaultInstance;
    }
-    
+
    public GetOnlineRegionResponse getDefaultInstanceForType() {
      return defaultInstance;
    }
-    
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private GetOnlineRegionResponse(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 10: {
+              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+                regionInfo_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo>();
+                mutable_bitField0_ |= 0x00000001;
+              }
+              regionInfo_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.PARSER, extensionRegistry));
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+          regionInfo_ = java.util.Collections.unmodifiableList(regionInfo_);
+        }
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionResponse_descriptor;
    }
-    
+
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionResponse_fieldAccessorTable;
+      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionResponse_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<GetOnlineRegionResponse> PARSER =
+        new com.google.protobuf.AbstractParser<GetOnlineRegionResponse>() {
+      public GetOnlineRegionResponse parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new GetOnlineRegionResponse(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<GetOnlineRegionResponse> getParserForType() {
+      return PARSER;
    }
-    
+
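The constructor above shows the 2.5 pattern for a repeated message field: entries are
accumulated into a plain ArrayList while tags stream in, and the finally block wraps
the list exactly once with Collections.unmodifiableList. The list the parsed message
hands out is that frozen one, so (hypothetical usage, `bytes` assumed):

    GetOnlineRegionResponse resp = GetOnlineRegionResponse.PARSER.parseFrom(bytes);
    int n = resp.getRegionInfoList().size();  // fine: read access
    resp.getRegionInfoList().clear();         // throws UnsupportedOperationException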
    // repeated .RegionInfo regionInfo = 1;
    public static final int REGIONINFO_FIELD_NUMBER = 1;
    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo> regionInfo_;
+    /**
+     * repeated .RegionInfo regionInfo = 1;
+     */
    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo> getRegionInfoList() {
      return regionInfo_;
    }
+    /**
+     * repeated .RegionInfo regionInfo = 1;
+     */
    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>
        getRegionInfoOrBuilderList() {
      return regionInfo_;
    }
+    /**
+     * repeated .RegionInfo regionInfo = 1;
+     */
    public int getRegionInfoCount() {
      return regionInfo_.size();
    }
+    /**
+     * repeated .RegionInfo regionInfo = 1;
+     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(int index) {
      return regionInfo_.get(index);
    }
+    /**
+     * repeated .RegionInfo regionInfo = 1;
+     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder(
        int index) {
      return regionInfo_.get(index);
    }
-    
+
    private void initFields() {
      regionInfo_ = java.util.Collections.emptyList();
    }
@@ -2542,7 +3190,7 @@ public final class AdminProtos {
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
-    
+
      for (int i = 0; i < getRegionInfoCount(); i++) {
        if (!getRegionInfo(i).isInitialized()) {
          memoizedIsInitialized = 0;
@@ -2552,7 +3200,7 @@ public final class AdminProtos {
      memoizedIsInitialized = 1;
      return true;
    }
-    
+
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
@@ -2561,12 +3209,12 @@ public final class AdminProtos {
      }
      getUnknownFields().writeTo(output);
    }
-    
+
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
-    
+
      size = 0;
      for (int i = 0; i < regionInfo_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
@@ -2576,14 +3224,14 @@ public final class AdminProtos {
      memoizedSerializedSize = size;
      return size;
    }
-    
+
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
-    
+
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
@@ -2593,7 +3241,7 @@ public final class AdminProtos {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse) obj;
-    
+
      boolean result = true;
      result = result && getRegionInfoList()
          .equals(other.getRegionInfoList());
@@ -2601,9 +3249,13 @@ public final class AdminProtos {
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
-    
+
+    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (getRegionInfoCount() > 0) {
@@ -2611,89 +3263,79 @@
        hash = (53 * hash) + getRegionInfoList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
      return hash;
    }
-    
+
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
    }
-    
+
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
-    
+
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
+    /**
+     * Protobuf type {@code GetOnlineRegionResponse}
+     */
    public static final class Builder
        extends com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponseOrBuilder {
@@ -2701,18 +3343,21 @@ public final class AdminProtos {
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionResponse_descriptor;
      }
-      
+
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionResponse_fieldAccessorTable;
+        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionResponse_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.Builder.class);
      }
-      
+
      // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
-      
-      private Builder(BuilderParent parent) {
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
@@ -2724,7 +3369,7 @@ public final class AdminProtos {
      private static Builder create() {
        return new Builder();
      }
-      
+
      public Builder clear() {
        super.clear();
        if (regionInfoBuilder_ == null) {
@@ -2735,20 +3380,20 @@ public final class AdminProtos {
        }
        return this;
      }
-      
+
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
-      
+
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDescriptor();
+        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionResponse_descriptor;
      }
-      
+
      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDefaultInstance();
      }
-      
+
      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse build() {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse result = buildPartial();
        if (!result.isInitialized()) {
@@ -2756,17 +3401,7 @@ public final class AdminProtos {
        }
        return result;
      }
-      
-      private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse buildParsed()
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(
-            result).asInvalidProtocolBufferException();
-        }
-        return result;
-      }
-      
+
      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse(this);
        int from_bitField0_ = bitField0_;
@@ -2782,7 +3417,7 @@ public final class AdminProtos {
        onBuilt();
        return result;
      }
-      
+
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse)other);
@@ -2791,7 +3426,7 @@ public final class AdminProtos {
          return this;
        }
      }
-      
+
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDefaultInstance()) return this;
        if (regionInfoBuilder_ == null) {
@@ -2823,7 +3458,7 @@ public final class AdminProtos {
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
-      
+
      public final boolean isInitialized() {
        for (int i = 0; i < getRegionInfoCount(); i++) {
          if (!getRegionInfo(i).isInitialized()) {
@@ -2833,42 +3468,26 @@ public final class AdminProtos {
        }
        return true;
      }
-      
+
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
-        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder(
-            this.getUnknownFields());
-        while (true) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              this.setUnknownFields(unknownFields.build());
-              onChanged();
-              return this;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                this.setUnknownFields(unknownFields.build());
-                onChanged();
-                return this;
-              }
-              break;
-            }
-            case 10: {
-              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.newBuilder();
-              input.readMessage(subBuilder, extensionRegistry);
-              addRegionInfo(subBuilder.buildPartial());
-              break;
-            }
+        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
          }
        }
+        return this;
      }
-      
       private int bitField0_;
-      
+
      // repeated .RegionInfo regionInfo = 1;
      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo> regionInfo_ =
        java.util.Collections.emptyList();
@@ -2878,10 +3497,13 @@ public final class AdminProtos {
          bitField0_ |= 0x00000001;
        }
      }
-      
+
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionInfoBuilder_;
-      
+
+      /**
+       * repeated .RegionInfo regionInfo = 1;
+       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo> getRegionInfoList() {
        if (regionInfoBuilder_ == null) {
          return java.util.Collections.unmodifiableList(regionInfo_);
@@ -2889,6 +3511,9 @@ public final class AdminProtos {
          return regionInfoBuilder_.getMessageList();
        }
      }
+      /**
+       * repeated .RegionInfo regionInfo = 1;
+       */
      public int getRegionInfoCount() {
        if (regionInfoBuilder_ == null) {
          return regionInfo_.size();
@@ -2896,6 +3521,9 @@ public final class AdminProtos {
          return regionInfoBuilder_.getCount();
        }
      }
+      /**
+       * repeated .RegionInfo regionInfo = 1;
+       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(int index) {
        if (regionInfoBuilder_ == null) {
          return regionInfo_.get(index);
@@ -2903,6 +3531,9 @@ public final class AdminProtos {
          return regionInfoBuilder_.getMessage(index);
        }
      }
+      /**
+       * repeated .RegionInfo regionInfo = 1;
+       */
      public Builder setRegionInfo(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) {
        if (regionInfoBuilder_ == null) {
@@ -2917,6 +3548,9 @@ public final class AdminProtos {
        }
        return this;
      }
+      /**
+       * repeated .RegionInfo regionInfo = 1;
+       */
      public Builder setRegionInfo(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) {
        if (regionInfoBuilder_ == null) {
@@ -2928,6 +3562,9 @@ public final class AdminProtos {
        }
        return this;
      }
+      /**
+       * repeated .RegionInfo regionInfo = 1;
+       */
      public Builder addRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) {
        if (regionInfoBuilder_ == null) {
          if (value == null) {
@@ -2941,6 +3578,9 @@ public final class AdminProtos {
        }
        return this;
      }
+      /**
+       * repeated .RegionInfo regionInfo = 1;
+       */
      public Builder addRegionInfo(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) {
        if (regionInfoBuilder_ == null) {
@@ -2955,6 +3595,9 @@ public final class AdminProtos {
        }
        return this;
      }
+      /**
+       * repeated .RegionInfo regionInfo = 1;
+       */
      public Builder addRegionInfo(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) {
        if (regionInfoBuilder_ == null) {
@@ -2966,6 +3609,9 @@ public final class AdminProtos {
        }
        return this;
      }
+      /**
+       * repeated .RegionInfo regionInfo = 1;
+       */
      public Builder addRegionInfo(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) {
        if (regionInfoBuilder_ == null) {
@@ -2977,6 +3623,9 @@ public final class AdminProtos {
        }
        return this;
      }
+      /**
+       * repeated .RegionInfo regionInfo = 1;
+       */
      public Builder addAllRegionInfo(
          java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo> values) {
        if (regionInfoBuilder_ == null) {
@@ -2988,6 +3637,9 @@ public final class AdminProtos {
        }
        return this;
      }
+      /**
+       * repeated .RegionInfo regionInfo = 1;
+       */
      public Builder clearRegionInfo() {
        if (regionInfoBuilder_ == null) {
          regionInfo_ = java.util.Collections.emptyList();
@@ -2998,6 +3650,9 @@ public final class AdminProtos {
        }
        return this;
      }
+      /**
+       * repeated .RegionInfo regionInfo = 1;
+       */
      public Builder removeRegionInfo(int index) {
        if (regionInfoBuilder_ == null) {
          ensureRegionInfoIsMutable();
@@ -3008,10 +3663,16 @@ public final class AdminProtos {
        }
        return this;
      }
+      /**
+       * repeated .RegionInfo regionInfo = 1;
+       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder getRegionInfoBuilder(
          int index) {
        return getRegionInfoFieldBuilder().getBuilder(index);
      }
+      /**
+       * repeated .RegionInfo regionInfo = 1;
+       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder(
          int index) {
        if (regionInfoBuilder_ == null) {
@@ -3019,6 +3680,9 @@ public final class AdminProtos {
          return regionInfoBuilder_.getMessageOrBuilder(index);
        }
      }
+      /**
+       * repeated .RegionInfo regionInfo = 1;
+       */
      public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>
           getRegionInfoOrBuilderList() {
        if (regionInfoBuilder_ != null) {
@@ -3027,15 +3691,24 @@ public final class AdminProtos {
          return java.util.Collections.unmodifiableList(regionInfo_);
        }
      }
+      /**
+       * repeated .RegionInfo regionInfo = 1;
+       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder addRegionInfoBuilder() {
        return getRegionInfoFieldBuilder().addBuilder(
            org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance());
      }
+      /**
+       * repeated .RegionInfo regionInfo = 1;
+       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder
          addRegionInfoBuilder(
          int index) {
        return getRegionInfoFieldBuilder().addBuilder(
            index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance());
      }
+      /**
+       * repeated .RegionInfo regionInfo = 1;
+       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder>
           getRegionInfoBuilderList() {
        return getRegionInfoFieldBuilder().getBuilderList();
@@ -3054,123 +3727,321 @@ public final class AdminProtos {
        }
        return regionInfoBuilder_;
      }
-      
+
      // @@protoc_insertion_point(builder_scope:GetOnlineRegionResponse)
    }
-    
+
    static {
      defaultInstance = new GetOnlineRegionResponse(true);
      defaultInstance.initFields();
    }
-    
+
    // @@protoc_insertion_point(class_scope:GetOnlineRegionResponse)
  }
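The regionInfo accessor block above is backed by a RepeatedFieldBuilder, which lets
callers edit nested RegionInfo entries in place rather than build, copy, and set. A
hedged assembly sketch (setRegionId is an assumed HBaseProtos.RegionInfo setter,
shown for illustration only):

    GetOnlineRegionResponse.Builder rb = GetOnlineRegionResponse.newBuilder();
    rb.addRegionInfoBuilder().setRegionId(1L);        // nested builder, edited in place
    GetOnlineRegionResponse resp = rb.buildPartial(); // build() would also enforce
                                                      // RegionInfo's required fields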
-  
+
  public interface OpenRegionRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
-    
+
    // repeated .OpenRegionRequest.RegionOpenInfo openInfo = 1;
+    /**
+     * repeated .OpenRegionRequest.RegionOpenInfo openInfo = 1;
+     */
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo>
        getOpenInfoList();
+    /**
+     * repeated .OpenRegionRequest.RegionOpenInfo openInfo = 1;
+     */
    org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo getOpenInfo(int index);
+    /**
+     * repeated .OpenRegionRequest.RegionOpenInfo openInfo = 1;
+     */
    int getOpenInfoCount();
+    /**
+     * repeated .OpenRegionRequest.RegionOpenInfo openInfo = 1;
+     */
    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder>
        getOpenInfoOrBuilderList();
+    /**
+     * repeated .OpenRegionRequest.RegionOpenInfo openInfo = 1;
+     */
    org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder getOpenInfoOrBuilder(
        int index);
  }
+  /**
+   * Protobuf type {@code OpenRegionRequest}
+   */
  public static final class OpenRegionRequest extends
      com.google.protobuf.GeneratedMessage
      implements OpenRegionRequestOrBuilder {
    // Use OpenRegionRequest.newBuilder() to construct.
-    private OpenRegionRequest(Builder builder) {
+    private OpenRegionRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
+      this.unknownFields = builder.getUnknownFields();
    }
-    private OpenRegionRequest(boolean noInit) {}
-    
+    private OpenRegionRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
    private static final OpenRegionRequest defaultInstance;
    public static OpenRegionRequest getDefaultInstance() {
      return defaultInstance;
    }
-    
+
    public OpenRegionRequest getDefaultInstanceForType() {
      return defaultInstance;
    }
-    
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private OpenRegionRequest(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 10: {
+              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+                openInfo_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo>();
+                mutable_bitField0_ |= 0x00000001;
+              }
+              openInfo_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.PARSER, extensionRegistry));
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+          openInfo_ = java.util.Collections.unmodifiableList(openInfo_);
+        }
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_descriptor;
    }
-    
+
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_fieldAccessorTable;
+      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<OpenRegionRequest> PARSER =
+        new com.google.protobuf.AbstractParser<OpenRegionRequest>() {
+      public OpenRegionRequest parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new OpenRegionRequest(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<OpenRegionRequest> getParserForType() {
+      return PARSER;
    }
-    
+
    public interface RegionOpenInfoOrBuilder
        extends com.google.protobuf.MessageOrBuilder {
-      
+
      // required .RegionInfo region = 1;
+      /**
+       * required .RegionInfo region = 1;
+       */
      boolean hasRegion();
+      /**
+       * required .RegionInfo region = 1;
+       */
      org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegion();
+      /**
+       * required .RegionInfo region = 1;
+       */
      org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionOrBuilder();
-      
+
      // optional uint32 versionOfOfflineNode = 2;
+      /**
+       * optional uint32 versionOfOfflineNode = 2;
+       */
      boolean hasVersionOfOfflineNode();
+      /**
+       * optional uint32 versionOfOfflineNode = 2;
+       */
      int getVersionOfOfflineNode();
    }
+    /**
+     * Protobuf type {@code OpenRegionRequest.RegionOpenInfo}
+     */
    public static final class RegionOpenInfo extends
        com.google.protobuf.GeneratedMessage
        implements RegionOpenInfoOrBuilder {
      // Use RegionOpenInfo.newBuilder() to construct.
-      private RegionOpenInfo(Builder builder) {
+      private RegionOpenInfo(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
        super(builder);
+        this.unknownFields = builder.getUnknownFields();
      }
-      private RegionOpenInfo(boolean noInit) {}
-      
+      private RegionOpenInfo(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
      private static final RegionOpenInfo defaultInstance;
      public static RegionOpenInfo getDefaultInstance() {
        return defaultInstance;
      }
-      
+
      public RegionOpenInfo getDefaultInstanceForType() {
        return defaultInstance;
      }
-      
+
+      private final com.google.protobuf.UnknownFieldSet unknownFields;
+      @java.lang.Override
+      public final com.google.protobuf.UnknownFieldSet
+          getUnknownFields() {
+        return this.unknownFields;
+      }
+      private RegionOpenInfo(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        initFields();
+        int mutable_bitField0_ = 0;
+        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+            com.google.protobuf.UnknownFieldSet.newBuilder();
+        try {
+          boolean done = false;
+          while (!done) {
+            int tag = input.readTag();
+            switch (tag) {
+              case 0:
+                done = true;
+                break;
+              default: {
+                if (!parseUnknownField(input, unknownFields,
+                                       extensionRegistry, tag)) {
+                  done = true;
+                }
+                break;
+              }
+              case 10: {
+                org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder subBuilder = null;
+                if (((bitField0_ & 0x00000001) == 0x00000001)) {
+                  subBuilder = region_.toBuilder();
+                }
+                region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.PARSER, extensionRegistry);
+                if (subBuilder != null) {
+                  subBuilder.mergeFrom(region_);
+                  region_ = subBuilder.buildPartial();
+                }
+                bitField0_ |= 0x00000001;
+                break;
+              }
+              case 16: {
+                bitField0_ |= 0x00000002;
+                versionOfOfflineNode_ = input.readUInt32();
+                break;
+              }
+            }
+          }
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          throw e.setUnfinishedMessage(this);
+        } catch (java.io.IOException e) {
+          throw new com.google.protobuf.InvalidProtocolBufferException(
+              e.getMessage()).setUnfinishedMessage(this);
+        } finally {
+          this.unknownFields = unknownFields.build();
+          makeExtensionsImmutable();
+        }
+      }
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_RegionOpenInfo_descriptor;
      }
-      
+
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_RegionOpenInfo_fieldAccessorTable;
+        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_RegionOpenInfo_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder.class);
+      }
+
+      public static com.google.protobuf.Parser<RegionOpenInfo> PARSER =
+          new com.google.protobuf.AbstractParser<RegionOpenInfo>() {
+        public RegionOpenInfo parsePartialFrom(
+            com.google.protobuf.CodedInputStream input,
+            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+            throws com.google.protobuf.InvalidProtocolBufferException {
+          return new RegionOpenInfo(input, extensionRegistry);
+        }
+      };
+
+      @java.lang.Override
+      public com.google.protobuf.Parser<RegionOpenInfo> getParserForType() {
+        return PARSER;
      }
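Note the case-10 branch in the constructor above: when a singular message field such
as `region` appears more than once on the wire, the later occurrence is merged into,
not substituted for, the earlier one via toBuilder()/mergeFrom(), matching protobuf's
merge rule for duplicated embedded messages. A sketch of the observable behavior
(`first` and `second` are hypothetical serialized RegionOpenInfo payloads):

    RegionOpenInfo merged = RegionOpenInfo.PARSER.parseFrom(
        com.google.protobuf.ByteString.copyFrom(first).concat(
            com.google.protobuf.ByteString.copyFrom(second)));
    // merged.getRegion() combines both region occurrences field by field.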
-      
+
      private int bitField0_;
      // required .RegionInfo region = 1;
      public static final int REGION_FIELD_NUMBER = 1;
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo region_;
+      /**
+       * required .RegionInfo region = 1;
+       */
      public boolean hasRegion() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
+      /**
+       * required .RegionInfo region = 1;
+       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegion() {
        return region_;
      }
+      /**
+       * required .RegionInfo region = 1;
+       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionOrBuilder() {
        return region_;
      }
-      
+
      // optional uint32 versionOfOfflineNode = 2;
      public static final int VERSIONOFOFFLINENODE_FIELD_NUMBER = 2;
      private int versionOfOfflineNode_;
+      /**
+       * optional uint32 versionOfOfflineNode = 2;
+       */
      public boolean hasVersionOfOfflineNode() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
+      /**
+       * optional uint32 versionOfOfflineNode = 2;
+       */
      public int getVersionOfOfflineNode() {
        return versionOfOfflineNode_;
      }
-      
+
      private void initFields() {
        region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance();
        versionOfOfflineNode_ = 0;
@@ -3179,7 +4050,7 @@ public final class AdminProtos {
      public final boolean isInitialized() {
        byte isInitialized = memoizedIsInitialized;
        if (isInitialized != -1) return isInitialized == 1;
-      
+
        if (!hasRegion()) {
          memoizedIsInitialized = 0;
          return false;
@@ -3191,7 +4062,7 @@ public final class AdminProtos {
        memoizedIsInitialized = 1;
        return true;
      }
-      
+
      public void writeTo(com.google.protobuf.CodedOutputStream output)
                          throws java.io.IOException {
        getSerializedSize();
@@ -3203,12 +4074,12 @@ public final class AdminProtos {
        }
        getUnknownFields().writeTo(output);
      }
-      
+
      private int memoizedSerializedSize = -1;
      public int getSerializedSize() {
        int size = memoizedSerializedSize;
        if (size != -1) return size;
-      
+
        size = 0;
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          size += com.google.protobuf.CodedOutputStream
@@ -3222,14 +4093,14 @@ public final class AdminProtos {
        memoizedSerializedSize = size;
        return size;
      }
-      
+
      private static final long serialVersionUID = 0L;
      @java.lang.Override
      protected java.lang.Object writeReplace()
          throws java.io.ObjectStreamException {
        return super.writeReplace();
      }
-      
+
      @java.lang.Override
      public boolean equals(final java.lang.Object obj) {
        if (obj == this) {
@@ -3239,7 +4110,7 @@ public final class AdminProtos {
          return super.equals(obj);
        }
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo) obj;
-      
+
        boolean result = true;
        result = result && (hasRegion() == other.hasRegion());
        if (hasRegion()) {
@@ -3255,9 +4126,13 @@ public final class AdminProtos {
            getUnknownFields().equals(other.getUnknownFields());
        return result;
      }
-      
+
+      private int memoizedHashCode = 0;
      @java.lang.Override
      public int hashCode() {
+        if (memoizedHashCode != 0) {
+          return memoizedHashCode;
+        }
        int hash = 41;
        hash = (19 * hash) + getDescriptorForType().hashCode();
        if (hasRegion()) {
@@ -3269,89 +4144,79 @@
          hash = (53 * hash) + getVersionOfOfflineNode();
        }
        hash = (29 * hash) + getUnknownFields().hashCode();
+        memoizedHashCode = hash;
        return hash;
      }
-      
+
parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder 
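All of the static parseFrom overloads above collapse into one-line delegations to PARSER, and parseDelimitedFrom keeps its old contract of returning null at a clean end of stream. A minimal round-trip sketch over an in-memory stream, assuming req is fully initialized (its required region field set); the helper name is illustrative:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo;

    // Write one message length-delimited, read it back, then hit EOF:
    // the second parseDelimitedFrom returns null, as the pre-patch code did.
    static void roundTrip(RegionOpenInfo req) throws IOException {
      ByteArrayOutputStream out = new ByteArrayOutputStream();
      req.writeDelimitedTo(out);
      ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
      RegionOpenInfo first = RegionOpenInfo.parseDelimitedFrom(in);
      RegionOpenInfo second = RegionOpenInfo.parseDelimitedFrom(in);
      assert first.equals(req) && second == null;
    }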
newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code OpenRegionRequest.RegionOpenInfo} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder { @@ -3359,18 +4224,21 @@ public final class AdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_RegionOpenInfo_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_RegionOpenInfo_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_RegionOpenInfo_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -3382,7 +4250,7 @@ public final class AdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (regionBuilder_ == null) { @@ -3395,20 +4263,20 @@ public final class AdminProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_RegionOpenInfo_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo build() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo result = buildPartial(); if (!result.isInitialized()) { @@ -3416,17 +4284,7 @@ public final class AdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo buildPartial() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo(this); int from_bitField0_ = bitField0_; @@ -3447,7 +4305,7 @@ public final class AdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo)other); @@ -3456,7 +4314,7 @@ public final class AdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo other) { if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.getDefaultInstance()) return this; if (other.hasRegion()) { @@ -3468,7 +4326,7 @@ public final class AdminProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasRegion()) { @@ -3480,57 +4338,39 @@ public final class AdminProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegion(subBuilder.buildPartial()); - break; - } - case 16: { - bitField0_ |= 0x00000002; - versionOfOfflineNode_ = input.readUInt32(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .RegionInfo region = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionBuilder_; + /** + * required .RegionInfo region = 1; + */ public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 
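Builder.mergeFrom(CodedInputStream, ...) above is now a shell over PARSER.parsePartialFrom that merges whatever was decoded before rethrowing. On the caller's side the partially decoded message can sometimes be recovered through InvalidProtocolBufferException.getUnfinishedMessage(); a best-effort sketch (the helper is illustrative, and the unfinished message may be null or fail isInitialized()):

    import com.google.protobuf.InvalidProtocolBufferException;
    import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo;

    // Attempt a full parse; on failure, salvage the fields decoded so far
    // instead of discarding them. Callers must null-check and may need to
    // check isInitialized() before trusting the result.
    static RegionOpenInfo parseBestEffort(byte[] maybeCorrupt) {
      try {
        return RegionOpenInfo.PARSER.parseFrom(maybeCorrupt);
      } catch (InvalidProtocolBufferException e) {
        return (RegionOpenInfo) e.getUnfinishedMessage();
      }
    }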
0x00000001); } + /** + * required .RegionInfo region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegion() { if (regionBuilder_ == null) { return region_; @@ -3538,6 +4378,9 @@ public final class AdminProtos { return regionBuilder_.getMessage(); } } + /** + * required .RegionInfo region = 1; + */ public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) { if (regionBuilder_ == null) { if (value == null) { @@ -3551,6 +4394,9 @@ public final class AdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionInfo region = 1; + */ public Builder setRegion( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) { if (regionBuilder_ == null) { @@ -3562,6 +4408,9 @@ public final class AdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionInfo region = 1; + */ public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -3578,6 +4427,9 @@ public final class AdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionInfo region = 1; + */ public Builder clearRegion() { if (regionBuilder_ == null) { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); @@ -3588,11 +4440,17 @@ public final class AdminProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * required .RegionInfo region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder getRegionBuilder() { bitField0_ |= 0x00000001; onChanged(); return getRegionFieldBuilder().getBuilder(); } + /** + * required .RegionInfo region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionOrBuilder() { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); @@ -3600,6 +4458,9 @@ public final class AdminProtos { return region_; } } + /** + * required .RegionInfo region = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> getRegionFieldBuilder() { @@ -3613,60 +4474,87 @@ public final class AdminProtos { } return regionBuilder_; } - + // optional uint32 versionOfOfflineNode = 2; private int versionOfOfflineNode_ ; + /** + * optional uint32 versionOfOfflineNode = 2; + */ public boolean hasVersionOfOfflineNode() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional uint32 versionOfOfflineNode = 2; + */ public int getVersionOfOfflineNode() { return versionOfOfflineNode_; } + /** + * optional uint32 versionOfOfflineNode = 2; + */ public Builder setVersionOfOfflineNode(int value) { bitField0_ |= 0x00000002; versionOfOfflineNode_ = value; onChanged(); return this; } + /** + * optional uint32 versionOfOfflineNode = 2; + */ public Builder clearVersionOfOfflineNode() { bitField0_ = (bitField0_ & ~0x00000002); versionOfOfflineNode_ = 0; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:OpenRegionRequest.RegionOpenInfo) } - + static { defaultInstance = new RegionOpenInfo(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:OpenRegionRequest.RegionOpenInfo) } - + // repeated .OpenRegionRequest.RegionOpenInfo openInfo 
= 1; public static final int OPENINFO_FIELD_NUMBER = 1; private java.util.List openInfo_; + /** + * repeated .OpenRegionRequest.RegionOpenInfo openInfo = 1; + */ public java.util.List getOpenInfoList() { return openInfo_; } + /** + * repeated .OpenRegionRequest.RegionOpenInfo openInfo = 1; + */ public java.util.List getOpenInfoOrBuilderList() { return openInfo_; } + /** + * repeated .OpenRegionRequest.RegionOpenInfo openInfo = 1; + */ public int getOpenInfoCount() { return openInfo_.size(); } + /** + * repeated .OpenRegionRequest.RegionOpenInfo openInfo = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo getOpenInfo(int index) { return openInfo_.get(index); } + /** + * repeated .OpenRegionRequest.RegionOpenInfo openInfo = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder getOpenInfoOrBuilder( int index) { return openInfo_.get(index); } - + private void initFields() { openInfo_ = java.util.Collections.emptyList(); } @@ -3674,7 +4562,7 @@ public final class AdminProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + for (int i = 0; i < getOpenInfoCount(); i++) { if (!getOpenInfo(i).isInitialized()) { memoizedIsInitialized = 0; @@ -3684,7 +4572,7 @@ public final class AdminProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -3693,12 +4581,12 @@ public final class AdminProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; for (int i = 0; i < openInfo_.size(); i++) { size += com.google.protobuf.CodedOutputStream @@ -3708,14 +4596,14 @@ public final class AdminProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -3725,7 +4613,7 @@ public final class AdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest) obj; - + boolean result = true; result = result && getOpenInfoList() .equals(other.getOpenInfoList()); @@ -3733,9 +4621,13 @@ public final class AdminProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (getOpenInfoCount() > 0) { @@ -3743,89 +4635,79 @@ public final class AdminProtos { hash = (53 * hash) + getOpenInfoList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static 
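equals is unchanged, but hashCode now caches its result in memoizedHashCode. The message is immutable, so the unsynchronized write is a benign race: every thread computes the same value, and 0 simply means not yet cached. The idiom in isolation, on a hypothetical class (not from the patch):

    // Immutable state plus one lazily cached int; a data race, or a hash
    // that happens to equal 0, only repeats the (deterministic) computation.
    final class Memoized {
      private final int a, b;
      private int memoizedHashCode = 0;   // 0 == not computed yet
      Memoized(int a, int b) { this.a = a; this.b = b; }
      @Override public int hashCode() {
        if (memoizedHashCode != 0) return memoizedHashCode;
        int hash = 41;
        hash = (19 * hash) + a;
        hash = (53 * hash) + b;
        memoizedHashCode = hash;          // same value from every thread
        return hash;
      }
    }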
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code 
OpenRegionRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequestOrBuilder { @@ -3833,18 +4715,21 @@ public final class AdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -3856,7 +4741,7 @@ public final class AdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (openInfoBuilder_ == null) { @@ -3867,20 +4752,20 @@ public final class AdminProtos { } return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest build() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest result = buildPartial(); if (!result.isInitialized()) { @@ -3888,17 +4773,7 @@ public final class AdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest(this); int from_bitField0_ = bitField0_; @@ -3914,7 +4789,7 @@ public final class AdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest)other); @@ -3923,7 +4798,7 @@ public final class AdminProtos { return this; } } - 
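With the Builder in place, assembling a request is untouched by this patch. A usage sketch; info1 and info2 stand for fully populated RegionInfo messages obtained elsewhere, and the znode version is illustrative:

    import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest;
    import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo;
    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo;

    // Assemble a two-region open request from pre-built RegionInfo messages.
    static OpenRegionRequest buildRequest(RegionInfo info1, RegionInfo info2) {
      return OpenRegionRequest.newBuilder()
          .addOpenInfo(RegionOpenInfo.newBuilder().setRegion(info1))
          .addOpenInfo(RegionOpenInfo.newBuilder().setRegion(info2)
              .setVersionOfOfflineNode(3))   // illustrative znode version
          .build();
    }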
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.getDefaultInstance()) return this; if (openInfoBuilder_ == null) { @@ -3955,7 +4830,7 @@ public final class AdminProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { for (int i = 0; i < getOpenInfoCount(); i++) { if (!getOpenInfo(i).isInitialized()) { @@ -3965,42 +4840,26 @@ public final class AdminProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addOpenInfo(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // repeated .OpenRegionRequest.RegionOpenInfo openInfo = 1; private java.util.List openInfo_ = java.util.Collections.emptyList(); @@ -4010,10 +4869,13 @@ public final class AdminProtos { bitField0_ |= 0x00000001; } } - + private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder> openInfoBuilder_; - + + /** + * repeated .OpenRegionRequest.RegionOpenInfo openInfo = 1; + */ public java.util.List getOpenInfoList() { if (openInfoBuilder_ == null) { return java.util.Collections.unmodifiableList(openInfo_); @@ -4021,6 +4883,9 @@ public final class AdminProtos { return openInfoBuilder_.getMessageList(); } } + /** + * repeated .OpenRegionRequest.RegionOpenInfo openInfo = 1; + */ public int getOpenInfoCount() { if (openInfoBuilder_ == null) { return openInfo_.size(); @@ -4028,6 +4893,9 @@ public final class AdminProtos { return openInfoBuilder_.getCount(); } } + /** + * repeated .OpenRegionRequest.RegionOpenInfo openInfo = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo getOpenInfo(int index) { if (openInfoBuilder_ == null) { return openInfo_.get(index); @@ -4035,6 +4903,9 @@ public final class AdminProtos { return openInfoBuilder_.getMessage(index); } } + /** + 
* repeated .OpenRegionRequest.RegionOpenInfo openInfo = 1; + */ public Builder setOpenInfo( int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo value) { if (openInfoBuilder_ == null) { @@ -4049,6 +4920,9 @@ public final class AdminProtos { } return this; } + /** + * repeated .OpenRegionRequest.RegionOpenInfo openInfo = 1; + */ public Builder setOpenInfo( int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder builderForValue) { if (openInfoBuilder_ == null) { @@ -4060,6 +4934,9 @@ public final class AdminProtos { } return this; } + /** + * repeated .OpenRegionRequest.RegionOpenInfo openInfo = 1; + */ public Builder addOpenInfo(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo value) { if (openInfoBuilder_ == null) { if (value == null) { @@ -4073,6 +4950,9 @@ public final class AdminProtos { } return this; } + /** + * repeated .OpenRegionRequest.RegionOpenInfo openInfo = 1; + */ public Builder addOpenInfo( int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo value) { if (openInfoBuilder_ == null) { @@ -4087,6 +4967,9 @@ public final class AdminProtos { } return this; } + /** + * repeated .OpenRegionRequest.RegionOpenInfo openInfo = 1; + */ public Builder addOpenInfo( org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder builderForValue) { if (openInfoBuilder_ == null) { @@ -4098,6 +4981,9 @@ public final class AdminProtos { } return this; } + /** + * repeated .OpenRegionRequest.RegionOpenInfo openInfo = 1; + */ public Builder addOpenInfo( int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder builderForValue) { if (openInfoBuilder_ == null) { @@ -4109,6 +4995,9 @@ public final class AdminProtos { } return this; } + /** + * repeated .OpenRegionRequest.RegionOpenInfo openInfo = 1; + */ public Builder addAllOpenInfo( java.lang.Iterable values) { if (openInfoBuilder_ == null) { @@ -4120,6 +5009,9 @@ public final class AdminProtos { } return this; } + /** + * repeated .OpenRegionRequest.RegionOpenInfo openInfo = 1; + */ public Builder clearOpenInfo() { if (openInfoBuilder_ == null) { openInfo_ = java.util.Collections.emptyList(); @@ -4130,6 +5022,9 @@ public final class AdminProtos { } return this; } + /** + * repeated .OpenRegionRequest.RegionOpenInfo openInfo = 1; + */ public Builder removeOpenInfo(int index) { if (openInfoBuilder_ == null) { ensureOpenInfoIsMutable(); @@ -4140,10 +5035,16 @@ public final class AdminProtos { } return this; } + /** + * repeated .OpenRegionRequest.RegionOpenInfo openInfo = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder getOpenInfoBuilder( int index) { return getOpenInfoFieldBuilder().getBuilder(index); } + /** + * repeated .OpenRegionRequest.RegionOpenInfo openInfo = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder getOpenInfoOrBuilder( int index) { if (openInfoBuilder_ == null) { @@ -4151,6 +5052,9 @@ public final class AdminProtos { return openInfoBuilder_.getMessageOrBuilder(index); } } + /** + * repeated .OpenRegionRequest.RegionOpenInfo openInfo = 1; + */ public java.util.List getOpenInfoOrBuilderList() { if (openInfoBuilder_ != null) { @@ -4159,15 +5063,24 @@ public final class AdminProtos { return java.util.Collections.unmodifiableList(openInfo_); } } + /** + 
* repeated .OpenRegionRequest.RegionOpenInfo openInfo = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder addOpenInfoBuilder() { return getOpenInfoFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.getDefaultInstance()); } + /** + * repeated .OpenRegionRequest.RegionOpenInfo openInfo = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder addOpenInfoBuilder( int index) { return getOpenInfoFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.getDefaultInstance()); } + /** + * repeated .OpenRegionRequest.RegionOpenInfo openInfo = 1; + */ public java.util.List getOpenInfoBuilderList() { return getOpenInfoFieldBuilder().getBuilderList(); @@ -4186,68 +5099,196 @@ public final class AdminProtos { } return openInfoBuilder_; } - + // @@protoc_insertion_point(builder_scope:OpenRegionRequest) } - + static { defaultInstance = new OpenRegionRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:OpenRegionRequest) } - + public interface OpenRegionResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // repeated .OpenRegionResponse.RegionOpeningState openingState = 1; + /** + * repeated .OpenRegionResponse.RegionOpeningState openingState = 1; + */ java.util.List getOpeningStateList(); + /** + * repeated .OpenRegionResponse.RegionOpeningState openingState = 1; + */ int getOpeningStateCount(); + /** + * repeated .OpenRegionResponse.RegionOpeningState openingState = 1; + */ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState getOpeningState(int index); } + /** + * Protobuf type {@code OpenRegionResponse} + */ public static final class OpenRegionResponse extends com.google.protobuf.GeneratedMessage implements OpenRegionResponseOrBuilder { // Use OpenRegionResponse.newBuilder() to construct. 
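The annotated accessors above sit on a RepeatedFieldBuilder, which lets callers edit nested elements in place through builder views instead of rebuilding the whole list. An illustrative sketch (helper name and version value are not from the patch):

    import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest;

    // Edit the first nested RegionOpenInfo in place; the parent builder
    // tracks the change through its repeated-field builder view.
    static OpenRegionRequest tweak(OpenRegionRequest req) {
      OpenRegionRequest.Builder b = req.toBuilder();
      b.getOpenInfoBuilder(0).setVersionOfOfflineNode(7);  // illustrative
      return b.build();
    }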
- private OpenRegionResponse(Builder builder) { + private OpenRegionResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private OpenRegionResponse(boolean noInit) {} - + private OpenRegionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final OpenRegionResponse defaultInstance; public static OpenRegionResponse getDefaultInstance() { return defaultInstance; } - + public OpenRegionResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private OpenRegionResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState value = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(1, rawValue); + } else { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + openingState_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + openingState_.add(value); + } + break; + } + case 10: { + int length = input.readRawVarint32(); + int oldLimit = input.pushLimit(length); + while(input.getBytesUntilLimit() > 0) { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState value = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(1, rawValue); + } else { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + openingState_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + openingState_.add(value); + } + } + input.popLimit(oldLimit); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + openingState_ = java.util.Collections.unmodifiableList(openingState_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionResponse_fieldAccessorTable; + 
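The constructor above accepts the repeated enum field in both wire encodings: tag 8 (field 1 as per-element varints, the proto2 default) and tag 10 (field 1 as one packed, length-delimited block), and unknown enum numbers are preserved in unknownFields rather than dropped. The tag arithmetic behind the two switch cases, for reference:

    // tag = (fieldNumber << 3) | wireType
    static final int OPENING_STATE_FIELD = 1;
    static final int TAG_UNPACKED = (OPENING_STATE_FIELD << 3) | 0;  // 8:  varint per element
    static final int TAG_PACKED   = (OPENING_STATE_FIELD << 3) | 2;  // 10: one length-delimited block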
return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public OpenRegionResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new OpenRegionResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + + /** + * Protobuf enum {@code OpenRegionResponse.RegionOpeningState} + */ public enum RegionOpeningState implements com.google.protobuf.ProtocolMessageEnum { + /** + * OPENED = 0; + */ OPENED(0, 0), + /** + * ALREADY_OPENED = 1; + */ ALREADY_OPENED(1, 1), + /** + * FAILED_OPENING = 2; + */ FAILED_OPENING(2, 2), ; - + + /** + * OPENED = 0; + */ public static final int OPENED_VALUE = 0; + /** + * ALREADY_OPENED = 1; + */ public static final int ALREADY_OPENED_VALUE = 1; + /** + * FAILED_OPENING = 2; + */ public static final int FAILED_OPENING_VALUE = 2; - - + + public final int getNumber() { return value; } - + public static RegionOpeningState valueOf(int value) { switch (value) { case 0: return OPENED; @@ -4256,7 +5297,7 @@ public final class AdminProtos { default: return null; } } - + public static com.google.protobuf.Internal.EnumLiteMap internalGetValueMap() { return internalValueMap; @@ -4268,7 +5309,7 @@ public final class AdminProtos { return RegionOpeningState.valueOf(number); } }; - + public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(index); @@ -4281,11 +5322,9 @@ public final class AdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDescriptor().getEnumTypes().get(0); } - - private static final RegionOpeningState[] VALUES = { - OPENED, ALREADY_OPENED, FAILED_OPENING, - }; - + + private static final RegionOpeningState[] VALUES = values(); + public static RegionOpeningState valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { @@ -4294,31 +5333,40 @@ public final class AdminProtos { } return VALUES[desc.getIndex()]; } - + private final int index; private final int value; - + private RegionOpeningState(int index, int value) { this.index = index; this.value = value; } - + // @@protoc_insertion_point(enum_scope:OpenRegionResponse.RegionOpeningState) } - + // repeated .OpenRegionResponse.RegionOpeningState openingState = 1; public static final int OPENINGSTATE_FIELD_NUMBER = 1; private java.util.List openingState_; + /** + * repeated .OpenRegionResponse.RegionOpeningState openingState = 1; + */ public java.util.List getOpeningStateList() { return openingState_; } + /** + * repeated .OpenRegionResponse.RegionOpeningState openingState = 1; + */ public int getOpeningStateCount() { return openingState_.size(); } + /** + * repeated .OpenRegionResponse.RegionOpeningState openingState = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState getOpeningState(int index) { return openingState_.get(index); } - + private void initFields() { 
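The hand-listed VALUES array is replaced by the compiler-generated values(), which cannot drift out of sync with the declared constants. Mapping a raw wire number back to a constant; the fallback below is illustrative, not part of the generated API:

    import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState;

    // valueOf(int) returns null for numbers this build does not know; the
    // parser routes such values into unknownFields instead of failing.
    static RegionOpeningState stateForNumber(int rawValue) {
      RegionOpeningState s = RegionOpeningState.valueOf(rawValue);
      return s != null ? s : RegionOpeningState.FAILED_OPENING;  // illustrative fallback
    }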
openingState_ = java.util.Collections.emptyList(); } @@ -4326,11 +5374,11 @@ public final class AdminProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -4339,12 +5387,12 @@ public final class AdminProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; { int dataSize = 0; @@ -4359,14 +5407,14 @@ public final class AdminProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -4376,7 +5424,7 @@ public final class AdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse) obj; - + boolean result = true; result = result && getOpeningStateList() .equals(other.getOpeningStateList()); @@ -4384,9 +5432,13 @@ public final class AdminProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (getOpeningStateCount() > 0) { @@ -4394,89 +5446,79 @@ public final class AdminProtos { hash = (53 * hash) + hashEnumList(getOpeningStateList()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public 
static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code OpenRegionResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponseOrBuilder { @@ -4484,18 +5526,21 @@ public final class AdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent 
parent) { super(parent); maybeForceBuilderInitialization(); } @@ -4506,27 +5551,27 @@ public final class AdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); openingState_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse build() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse result = buildPartial(); if (!result.isInitialized()) { @@ -4534,17 +5579,7 @@ public final class AdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse(this); int from_bitField0_ = bitField0_; @@ -4556,7 +5591,7 @@ public final class AdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse)other); @@ -4565,7 +5600,7 @@ public final class AdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDefaultInstance()) return this; if (!other.openingState_.isEmpty()) { @@ -4581,65 +5616,30 @@ public final class AdminProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - int rawValue = input.readEnum(); - 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState value = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(1, rawValue); - } else { - addOpeningState(value); - } - break; - } - case 10: { - int length = input.readRawVarint32(); - int oldLimit = input.pushLimit(length); - while(input.getBytesUntilLimit() > 0) { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState value = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(1, rawValue); - } else { - addOpeningState(value); - } - } - input.popLimit(oldLimit); - break; - } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // repeated .OpenRegionResponse.RegionOpeningState openingState = 1; private java.util.List openingState_ = java.util.Collections.emptyList(); @@ -4649,15 +5649,27 @@ public final class AdminProtos { bitField0_ |= 0x00000001; } } + /** + * repeated .OpenRegionResponse.RegionOpeningState openingState = 1; + */ public java.util.List getOpeningStateList() { return java.util.Collections.unmodifiableList(openingState_); } + /** + * repeated .OpenRegionResponse.RegionOpeningState openingState = 1; + */ public int getOpeningStateCount() { return openingState_.size(); } + /** + * repeated .OpenRegionResponse.RegionOpeningState openingState = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState getOpeningState(int index) { return openingState_.get(index); } + /** + * repeated .OpenRegionResponse.RegionOpeningState openingState = 1; + */ public Builder setOpeningState( int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState value) { if (value == null) { @@ -4668,6 +5680,9 @@ public final class AdminProtos { onChanged(); return this; } + /** + * repeated .OpenRegionResponse.RegionOpeningState openingState = 1; + */ public Builder addOpeningState(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState value) { if (value == null) { throw new NullPointerException(); @@ -4677,6 +5692,9 @@ public final class AdminProtos { onChanged(); return this; } + /** + * repeated .OpenRegionResponse.RegionOpeningState openingState = 1; + */ public Builder addAllOpeningState( java.lang.Iterable values) { ensureOpeningStateIsMutable(); @@ -4684,120 +5702,287 @@ public final class AdminProtos { onChanged(); return this; } + /** + * repeated .OpenRegionResponse.RegionOpeningState openingState = 1; + */ public Builder clearOpeningState() { openingState_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:OpenRegionResponse) } - + static { defaultInstance = new OpenRegionResponse(true); defaultInstance.initFields(); } - + // 
@@protoc_insertion_point(class_scope:OpenRegionResponse) } - + public interface CloseRegionRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .RegionSpecifier region = 1; + /** + * required .RegionSpecifier region = 1; + */ boolean hasRegion(); + /** + * required .RegionSpecifier region = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + /** + * required .RegionSpecifier region = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - + // optional uint32 versionOfClosingNode = 2; + /** + * optional uint32 versionOfClosingNode = 2; + */ boolean hasVersionOfClosingNode(); + /** + * optional uint32 versionOfClosingNode = 2; + */ int getVersionOfClosingNode(); - + // optional bool transitionInZK = 3 [default = true]; + /** + * optional bool transitionInZK = 3 [default = true]; + */ boolean hasTransitionInZK(); + /** + * optional bool transitionInZK = 3 [default = true]; + */ boolean getTransitionInZK(); - + // optional .ServerName destinationServer = 4; + /** + * optional .ServerName destinationServer = 4; + */ boolean hasDestinationServer(); + /** + * optional .ServerName destinationServer = 4; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDestinationServer(); + /** + * optional .ServerName destinationServer = 4; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDestinationServerOrBuilder(); } + /** + * Protobuf type {@code CloseRegionRequest} + * + *
+   * <pre>
+   **
+   * Closes the specified region and will use or not use ZK during the close
+   * according to the specified flag.
+   * </pre>
+ */ public static final class CloseRegionRequest extends com.google.protobuf.GeneratedMessage implements CloseRegionRequestOrBuilder { // Use CloseRegionRequest.newBuilder() to construct. - private CloseRegionRequest(Builder builder) { + private CloseRegionRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private CloseRegionRequest(boolean noInit) {} - + private CloseRegionRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final CloseRegionRequest defaultInstance; public static CloseRegionRequest getDefaultInstance() { return defaultInstance; } - + public CloseRegionRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private CloseRegionRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = region_.toBuilder(); + } + region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(region_); + region_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + case 16: { + bitField0_ |= 0x00000002; + versionOfClosingNode_ = input.readUInt32(); + break; + } + case 24: { + bitField0_ |= 0x00000004; + transitionInZK_ = input.readBool(); + break; + } + case 34: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = null; + if (((bitField0_ & 0x00000008) == 0x00000008)) { + subBuilder = destinationServer_.toBuilder(); + } + destinationServer_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(destinationServer_); + destinationServer_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000008; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return 
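Note the subBuilder dance in the constructor above: if a singular message field such as region or destinationServer occurs more than once on the wire, the occurrences are merged (toBuilder, mergeFrom, buildPartial) rather than last-one-wins, matching builder merge semantics. The plain path is a simple round trip; region and server are assumed valid, and transitionInZK defaults to true:

    import com.google.protobuf.InvalidProtocolBufferException;
    import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest;
    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier;
    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName;

    // Serialize and reparse a close request built from pre-validated parts.
    static CloseRegionRequest roundTrip(RegionSpecifier region, ServerName server)
        throws InvalidProtocolBufferException {
      CloseRegionRequest req = CloseRegionRequest.newBuilder()
          .setRegion(region)                  // required field
          .setDestinationServer(server)       // optional field
          .build();
      return CloseRegionRequest.PARSER.parseFrom(req.toByteArray());
    }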
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public CloseRegionRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CloseRegionRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required .RegionSpecifier region = 1; public static final int REGION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + /** + * required .RegionSpecifier region = 1; + */ public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { return region_; } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { return region_; } - + // optional uint32 versionOfClosingNode = 2; public static final int VERSIONOFCLOSINGNODE_FIELD_NUMBER = 2; private int versionOfClosingNode_; + /** + * optional uint32 versionOfClosingNode = 2; + */ public boolean hasVersionOfClosingNode() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional uint32 versionOfClosingNode = 2; + */ public int getVersionOfClosingNode() { return versionOfClosingNode_; } - + // optional bool transitionInZK = 3 [default = true]; public static final int TRANSITIONINZK_FIELD_NUMBER = 3; private boolean transitionInZK_; + /** + * optional bool transitionInZK = 3 [default = true]; + */ public boolean hasTransitionInZK() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional bool transitionInZK = 3 [default = true]; + */ public boolean getTransitionInZK() { return transitionInZK_; } - + // optional .ServerName destinationServer = 4; public static final int DESTINATIONSERVER_FIELD_NUMBER = 4; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName destinationServer_; + /** + * optional .ServerName destinationServer = 4; + */ public boolean hasDestinationServer() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * optional .ServerName destinationServer = 4; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDestinationServer() { return destinationServer_; } + /** + * optional .ServerName destinationServer = 4; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDestinationServerOrBuilder() { return destinationServer_; } - + private void initFields() { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); versionOfClosingNode_ = 0; @@ -4808,7 +5993,7 @@ public final class AdminProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return 
isInitialized == 1; - + if (!hasRegion()) { memoizedIsInitialized = 0; return false; @@ -4826,7 +6011,7 @@ public final class AdminProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -4844,12 +6029,12 @@ public final class AdminProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -4871,14 +6056,14 @@ public final class AdminProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -4888,7 +6073,7 @@ public final class AdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest) obj; - + boolean result = true; result = result && (hasRegion() == other.hasRegion()); if (hasRegion()) { @@ -4914,9 +6099,13 @@ public final class AdminProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasRegion()) { @@ -4936,89 +6125,85 @@ public final class AdminProtos { hash = (53 * hash) + getDestinationServer().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code CloseRegionRequest} + * + *
+     * <pre>
+     **
+     * Closes the specified region and will use or not use ZK during the close
+     * according to the specified flag.
+     * </pre>
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequestOrBuilder { @@ -5026,18 +6211,21 @@ public final class AdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -5050,7 +6238,7 @@ public final class AdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (regionBuilder_ == null) { @@ -5071,20 +6259,20 @@ public final class AdminProtos { bitField0_ = (bitField0_ & ~0x00000008); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest build() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest result = buildPartial(); if (!result.isInitialized()) { @@ -5092,17 +6280,7 @@ public final class AdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest(this); int from_bitField0_ = bitField0_; @@ -5135,7 +6313,7 @@ public final class AdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest)other); @@ -5144,7 +6322,7 @@ public final 
class AdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.getDefaultInstance()) return this; if (other.hasRegion()) { @@ -5162,7 +6340,7 @@ public final class AdminProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasRegion()) { @@ -5180,71 +6358,39 @@ public final class AdminProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegion(subBuilder.buildPartial()); - break; - } - case 16: { - bitField0_ |= 0x00000002; - versionOfClosingNode_ = input.readUInt32(); - break; - } - case 24: { - bitField0_ |= 0x00000004; - transitionInZK_ = input.readBool(); - break; - } - case 34: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(); - if (hasDestinationServer()) { - subBuilder.mergeFrom(getDestinationServer()); - } - input.readMessage(subBuilder, extensionRegistry); - setDestinationServer(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .RegionSpecifier region = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + /** + * required .RegionSpecifier region = 1; + */ public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { return region_; @@ -5252,6 +6398,9 @@ public final class AdminProtos { return 
regionBuilder_.getMessage(); } } + /** + * required .RegionSpecifier region = 1; + */ public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (value == null) { @@ -5265,6 +6414,9 @@ public final class AdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier region = 1; + */ public Builder setRegion( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { if (regionBuilder_ == null) { @@ -5276,6 +6428,9 @@ public final class AdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier region = 1; + */ public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -5292,6 +6447,9 @@ public final class AdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier region = 1; + */ public Builder clearRegion() { if (regionBuilder_ == null) { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); @@ -5302,11 +6460,17 @@ public final class AdminProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { bitField0_ |= 0x00000001; onChanged(); return getRegionFieldBuilder().getBuilder(); } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); @@ -5314,6 +6478,9 @@ public final class AdminProtos { return region_; } } + /** + * required .RegionSpecifier region = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { @@ -5327,56 +6494,86 @@ public final class AdminProtos { } return regionBuilder_; } - + // optional uint32 versionOfClosingNode = 2; private int versionOfClosingNode_ ; + /** + * optional uint32 versionOfClosingNode = 2; + */ public boolean hasVersionOfClosingNode() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional uint32 versionOfClosingNode = 2; + */ public int getVersionOfClosingNode() { return versionOfClosingNode_; } + /** + * optional uint32 versionOfClosingNode = 2; + */ public Builder setVersionOfClosingNode(int value) { bitField0_ |= 0x00000002; versionOfClosingNode_ = value; onChanged(); return this; } + /** + * optional uint32 versionOfClosingNode = 2; + */ public Builder clearVersionOfClosingNode() { bitField0_ = (bitField0_ & ~0x00000002); versionOfClosingNode_ = 0; onChanged(); return this; } - + // optional bool transitionInZK = 3 [default = true]; private boolean transitionInZK_ = true; + /** + * optional bool transitionInZK = 3 [default = true]; + */ public boolean hasTransitionInZK() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional bool transitionInZK = 3 [default = true]; + */ public boolean getTransitionInZK() { return transitionInZK_; } + /** + * optional bool transitionInZK = 3 [default = true]; + */ public Builder setTransitionInZK(boolean 
value) { bitField0_ |= 0x00000004; transitionInZK_ = value; onChanged(); return this; } + /** + * optional bool transitionInZK = 3 [default = true]; + */ public Builder clearTransitionInZK() { bitField0_ = (bitField0_ & ~0x00000004); transitionInZK_ = true; onChanged(); return this; } - + // optional .ServerName destinationServer = 4; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName destinationServer_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> destinationServerBuilder_; + /** + * optional .ServerName destinationServer = 4; + */ public boolean hasDestinationServer() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * optional .ServerName destinationServer = 4; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDestinationServer() { if (destinationServerBuilder_ == null) { return destinationServer_; @@ -5384,6 +6581,9 @@ public final class AdminProtos { return destinationServerBuilder_.getMessage(); } } + /** + * optional .ServerName destinationServer = 4; + */ public Builder setDestinationServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { if (destinationServerBuilder_ == null) { if (value == null) { @@ -5397,6 +6597,9 @@ public final class AdminProtos { bitField0_ |= 0x00000008; return this; } + /** + * optional .ServerName destinationServer = 4; + */ public Builder setDestinationServer( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { if (destinationServerBuilder_ == null) { @@ -5408,6 +6611,9 @@ public final class AdminProtos { bitField0_ |= 0x00000008; return this; } + /** + * optional .ServerName destinationServer = 4; + */ public Builder mergeDestinationServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { if (destinationServerBuilder_ == null) { if (((bitField0_ & 0x00000008) == 0x00000008) && @@ -5424,6 +6630,9 @@ public final class AdminProtos { bitField0_ |= 0x00000008; return this; } + /** + * optional .ServerName destinationServer = 4; + */ public Builder clearDestinationServer() { if (destinationServerBuilder_ == null) { destinationServer_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); @@ -5434,11 +6643,17 @@ public final class AdminProtos { bitField0_ = (bitField0_ & ~0x00000008); return this; } + /** + * optional .ServerName destinationServer = 4; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getDestinationServerBuilder() { bitField0_ |= 0x00000008; onChanged(); return getDestinationServerFieldBuilder().getBuilder(); } + /** + * optional .ServerName destinationServer = 4; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDestinationServerOrBuilder() { if (destinationServerBuilder_ != null) { return destinationServerBuilder_.getMessageOrBuilder(); @@ -5446,6 +6661,9 @@ public final class AdminProtos { return destinationServer_; } } + /** + * optional .ServerName destinationServer = 4; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getDestinationServerFieldBuilder() { @@ -5459,64 +6677,143 @@ public final class AdminProtos { } return destinationServerBuilder_; } - + // @@protoc_insertion_point(builder_scope:CloseRegionRequest) } - + static { defaultInstance = new CloseRegionRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:CloseRegionRequest) } - + public interface CloseRegionResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required bool closed = 1; + /** + * required bool closed = 1; + */ boolean hasClosed(); + /** + * required bool closed = 1; + */ boolean getClosed(); } + /** + * Protobuf type {@code CloseRegionResponse} + */ public static final class CloseRegionResponse extends com.google.protobuf.GeneratedMessage implements CloseRegionResponseOrBuilder { // Use CloseRegionResponse.newBuilder() to construct. - private CloseRegionResponse(Builder builder) { + private CloseRegionResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private CloseRegionResponse(boolean noInit) {} - + private CloseRegionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final CloseRegionResponse defaultInstance; public static CloseRegionResponse getDefaultInstance() { return defaultInstance; } - + public CloseRegionResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private CloseRegionResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + closed_ = input.readBool(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.Builder.class); + } + 
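The pattern repeated for each message in this diff (a static `PARSER` built as an anonymous `com.google.protobuf.AbstractParser`, returned from `getParserForType()`, and backing every static `parseFrom` overload) also gives callers a way to parse generically. A minimal sketch, assuming protobuf-java 2.5.x; `parseOrNull` is a hypothetical helper, not an HBase or protobuf API:

    import com.google.protobuf.InvalidProtocolBufferException;
    import com.google.protobuf.Parser;

    public final class ParserSketch {
      // Hypothetical helper: any protoc-2.5-generated message exposes a
      // static PARSER field typed Parser<T>, so callers can parse without
      // the removed newBuilder().mergeFrom(data).buildParsed() path.
      static <T> T parseOrNull(Parser<T> parser, byte[] data) {
        try {
          return parser.parseFrom(data);
        } catch (InvalidProtocolBufferException e) {
          return null; // caller decides how to treat corrupt input
        }
      }
    }

For example, `parseOrNull(CloseRegionRequest.PARSER, bytes)` stands in for the old `newBuilder().mergeFrom(bytes).buildParsed()` call chain that this patch deletes.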
+ public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public CloseRegionResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CloseRegionResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required bool closed = 1; public static final int CLOSED_FIELD_NUMBER = 1; private boolean closed_; + /** + * required bool closed = 1; + */ public boolean hasClosed() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bool closed = 1; + */ public boolean getClosed() { return closed_; } - + private void initFields() { closed_ = false; } @@ -5524,7 +6821,7 @@ public final class AdminProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasClosed()) { memoizedIsInitialized = 0; return false; @@ -5532,7 +6829,7 @@ public final class AdminProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -5541,12 +6838,12 @@ public final class AdminProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -5556,14 +6853,14 @@ public final class AdminProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -5573,7 +6870,7 @@ public final class AdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse) obj; - + boolean result = true; result = result && (hasClosed() == other.hasClosed()); if (hasClosed()) { @@ -5584,9 +6881,13 @@ public final class AdminProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasClosed()) { @@ -5594,89 +6895,79 @@ public final class AdminProtos { hash = (53 * hash) + hashBoolean(getClosed()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return 
newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code CloseRegionResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponseOrBuilder { @@ -5684,18 +6975,21 @@ 
public final class AdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -5706,27 +7000,27 @@ public final class AdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); closed_ = false; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse build() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse result = buildPartial(); if (!result.isInitialized()) { @@ -5734,17 +7028,7 @@ public final class AdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse(this); int from_bitField0_ = bitField0_; @@ -5757,7 +7041,7 @@ public final class AdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse)other); @@ -5766,7 +7050,7 @@ public final class AdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDefaultInstance()) 
return this; if (other.hasClosed()) { @@ -5775,7 +7059,7 @@ public final class AdminProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasClosed()) { @@ -5783,137 +7067,251 @@ public final class AdminProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - closed_ = input.readBool(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required bool closed = 1; private boolean closed_ ; + /** + * required bool closed = 1; + */ public boolean hasClosed() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bool closed = 1; + */ public boolean getClosed() { return closed_; } + /** + * required bool closed = 1; + */ public Builder setClosed(boolean value) { bitField0_ |= 0x00000001; closed_ = value; onChanged(); return this; } + /** + * required bool closed = 1; + */ public Builder clearClosed() { bitField0_ = (bitField0_ & ~0x00000001); closed_ = false; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:CloseRegionResponse) } - + static { defaultInstance = new CloseRegionResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:CloseRegionResponse) } - + public interface FlushRegionRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .RegionSpecifier region = 1; + /** + * required .RegionSpecifier region = 1; + */ boolean hasRegion(); + /** + * required .RegionSpecifier region = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + /** + * required .RegionSpecifier region = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - + // optional uint64 ifOlderThanTs = 2; + /** + * optional uint64 ifOlderThanTs = 2; + */ boolean hasIfOlderThanTs(); + /** + * optional uint64 ifOlderThanTs = 2; + */ long getIfOlderThanTs(); } + /** + * Protobuf type {@code FlushRegionRequest} + * + *
+   * <pre>
+   **
+   * Flushes the MemStore of the specified region.
+   * <p>
+   * This method is synchronous.
+   * </pre>
+ */ public static final class FlushRegionRequest extends com.google.protobuf.GeneratedMessage implements FlushRegionRequestOrBuilder { // Use FlushRegionRequest.newBuilder() to construct. - private FlushRegionRequest(Builder builder) { + private FlushRegionRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private FlushRegionRequest(boolean noInit) {} - + private FlushRegionRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final FlushRegionRequest defaultInstance; public static FlushRegionRequest getDefaultInstance() { return defaultInstance; } - + public FlushRegionRequest getDefaultInstanceForType() { return defaultInstance; } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionRequest_fieldAccessorTable; + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; } - - private int bitField0_; + private FlushRegionRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = region_.toBuilder(); + } + region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(region_); + region_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + case 16: { + bitField0_ |= 0x00000002; + ifOlderThanTs_ = input.readUInt64(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.class, 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public FlushRegionRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new FlushRegionRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; // required .RegionSpecifier region = 1; public static final int REGION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + /** + * required .RegionSpecifier region = 1; + */ public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { return region_; } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { return region_; } - + // optional uint64 ifOlderThanTs = 2; public static final int IFOLDERTHANTS_FIELD_NUMBER = 2; private long ifOlderThanTs_; + /** + * optional uint64 ifOlderThanTs = 2; + */ public boolean hasIfOlderThanTs() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional uint64 ifOlderThanTs = 2; + */ public long getIfOlderThanTs() { return ifOlderThanTs_; } - + private void initFields() { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); ifOlderThanTs_ = 0L; @@ -5922,7 +7320,7 @@ public final class AdminProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasRegion()) { memoizedIsInitialized = 0; return false; @@ -5934,7 +7332,7 @@ public final class AdminProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -5946,12 +7344,12 @@ public final class AdminProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -5965,14 +7363,14 @@ public final class AdminProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -5982,7 +7380,7 @@ public final class AdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest) obj; - + boolean result = true; result = result && (hasRegion() == other.hasRegion()); if (hasRegion()) { @@ -5998,9 +7396,13 @@ public final class AdminProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + 
if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasRegion()) { @@ -6012,89 +7414,86 @@ public final class AdminProtos { hash = (53 * hash) + hashLong(getIfOlderThanTs()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return 
newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code FlushRegionRequest} + * + *
+     * <pre>
+     **
+     * Flushes the MemStore of the specified region.
+     * <p>
+     * This method is synchronous.
+     * </pre>
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequestOrBuilder { @@ -6102,18 +7501,21 @@ public final class AdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -6125,7 +7527,7 @@ public final class AdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (regionBuilder_ == null) { @@ -6138,20 +7540,20 @@ public final class AdminProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest build() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest result = buildPartial(); if (!result.isInitialized()) { @@ -6159,17 +7561,7 @@ public final class AdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest(this); int from_bitField0_ = bitField0_; @@ -6190,7 +7582,7 @@ public final class AdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest)other); @@ -6199,7 +7591,7 @@ public final 
class AdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.getDefaultInstance()) return this; if (other.hasRegion()) { @@ -6211,7 +7603,7 @@ public final class AdminProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasRegion()) { @@ -6223,57 +7615,39 @@ public final class AdminProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegion(subBuilder.buildPartial()); - break; - } - case 16: { - bitField0_ |= 0x00000002; - ifOlderThanTs_ = input.readUInt64(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .RegionSpecifier region = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + /** + * required .RegionSpecifier region = 1; + */ public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { return region_; @@ -6281,6 +7655,9 @@ public final class AdminProtos { return regionBuilder_.getMessage(); } } + /** + * required .RegionSpecifier region = 1; + */ public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (value == null) { @@ -6294,6 +7671,9 @@ public final class AdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier region = 1; + */ public Builder setRegion( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder 
builderForValue) { if (regionBuilder_ == null) { @@ -6305,6 +7685,9 @@ public final class AdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier region = 1; + */ public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -6321,6 +7704,9 @@ public final class AdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier region = 1; + */ public Builder clearRegion() { if (regionBuilder_ == null) { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); @@ -6331,11 +7717,17 @@ public final class AdminProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { bitField0_ |= 0x00000001; onChanged(); return getRegionFieldBuilder().getBuilder(); } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); @@ -6343,6 +7735,9 @@ public final class AdminProtos { return region_; } } + /** + * required .RegionSpecifier region = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { @@ -6356,99 +7751,207 @@ public final class AdminProtos { } return regionBuilder_; } - + // optional uint64 ifOlderThanTs = 2; private long ifOlderThanTs_ ; + /** + * optional uint64 ifOlderThanTs = 2; + */ public boolean hasIfOlderThanTs() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional uint64 ifOlderThanTs = 2; + */ public long getIfOlderThanTs() { return ifOlderThanTs_; } + /** + * optional uint64 ifOlderThanTs = 2; + */ public Builder setIfOlderThanTs(long value) { bitField0_ |= 0x00000002; ifOlderThanTs_ = value; onChanged(); return this; } + /** + * optional uint64 ifOlderThanTs = 2; + */ public Builder clearIfOlderThanTs() { bitField0_ = (bitField0_ & ~0x00000002); ifOlderThanTs_ = 0L; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:FlushRegionRequest) } - + static { defaultInstance = new FlushRegionRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:FlushRegionRequest) } - + public interface FlushRegionResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required uint64 lastFlushTime = 1; + /** + * required uint64 lastFlushTime = 1; + */ boolean hasLastFlushTime(); + /** + * required uint64 lastFlushTime = 1; + */ long getLastFlushTime(); - + // optional bool flushed = 2; + /** + * optional bool flushed = 2; + */ boolean hasFlushed(); + /** + * optional bool flushed = 2; + */ boolean getFlushed(); } + /** + * Protobuf type {@code FlushRegionResponse} + */ public static final class FlushRegionResponse extends com.google.protobuf.GeneratedMessage implements FlushRegionResponseOrBuilder { // Use FlushRegionResponse.newBuilder() to construct. 
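// A minimal caller-side sketch of the protobuf 2.5 parsing idiom that the
// Builder.mergeFrom rewrite above enables: PARSER.parsePartialFrom now does
// the tag loop, and InvalidProtocolBufferException carries the partially
// read message. The recovery branch below is a hypothetical illustration,
// not something the surrounding generated code requires.
import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest;

final class FlushRequestParseSketch {
  static FlushRegionRequest parseOrPartial(byte[] wire) {
    try {
      // 2.4 style was FlushRegionRequest.newBuilder().mergeFrom(wire).build();
      // 2.5 routes everything through the generated static PARSER.
      return FlushRegionRequest.PARSER.parseFrom(wire);
    } catch (InvalidProtocolBufferException e) {
      // The exception now carries whatever was read before the failure
      // (may be null if nothing was parsed).
      return (FlushRegionRequest) e.getUnfinishedMessage();
    }
  }
}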
- private FlushRegionResponse(Builder builder) { + private FlushRegionResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private FlushRegionResponse(boolean noInit) {} - + private FlushRegionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final FlushRegionResponse defaultInstance; public static FlushRegionResponse getDefaultInstance() { return defaultInstance; } - + public FlushRegionResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private FlushRegionResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + lastFlushTime_ = input.readUInt64(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + flushed_ = input.readBool(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public FlushRegionResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new FlushRegionResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required uint64 lastFlushTime = 1; public static final int LASTFLUSHTIME_FIELD_NUMBER = 1; private long lastFlushTime_; + /** + * required uint64 lastFlushTime = 1; + */ public boolean hasLastFlushTime() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required uint64 lastFlushTime = 1; + */ public long 
getLastFlushTime() { return lastFlushTime_; } - + // optional bool flushed = 2; public static final int FLUSHED_FIELD_NUMBER = 2; private boolean flushed_; + /** + * optional bool flushed = 2; + */ public boolean hasFlushed() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional bool flushed = 2; + */ public boolean getFlushed() { return flushed_; } - + private void initFields() { lastFlushTime_ = 0L; flushed_ = false; @@ -6457,7 +7960,7 @@ public final class AdminProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasLastFlushTime()) { memoizedIsInitialized = 0; return false; @@ -6465,7 +7968,7 @@ public final class AdminProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -6477,12 +7980,12 @@ public final class AdminProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -6496,14 +7999,14 @@ public final class AdminProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -6513,7 +8016,7 @@ public final class AdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse) obj; - + boolean result = true; result = result && (hasLastFlushTime() == other.hasLastFlushTime()); if (hasLastFlushTime()) { @@ -6529,9 +8032,13 @@ public final class AdminProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasLastFlushTime()) { @@ -6543,89 +8050,79 @@ public final class AdminProtos { hash = (53 * hash) + hashBoolean(getFlushed()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code FlushRegionResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponseOrBuilder { @@ -6633,18 +8130,21 @@ public final class AdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionResponse_fieldAccessorTable; + return 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -6655,7 +8155,7 @@ public final class AdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); lastFlushTime_ = 0L; @@ -6664,20 +8164,20 @@ public final class AdminProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse build() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse result = buildPartial(); if (!result.isInitialized()) { @@ -6685,17 +8185,7 @@ public final class AdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse(this); int from_bitField0_ = bitField0_; @@ -6712,7 +8202,7 @@ public final class AdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse)other); @@ -6721,7 +8211,7 @@ public final class AdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDefaultInstance()) return this; if (other.hasLastFlushTime()) { @@ -6733,7 +8223,7 @@ public final class AdminProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasLastFlushTime()) { @@ -6741,163 +8231,286 @@ public final class AdminProtos { } return true; } - + public Builder 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - lastFlushTime_ = input.readUInt64(); - break; - } - case 16: { - bitField0_ |= 0x00000002; - flushed_ = input.readBool(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required uint64 lastFlushTime = 1; private long lastFlushTime_ ; + /** + * required uint64 lastFlushTime = 1; + */ public boolean hasLastFlushTime() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required uint64 lastFlushTime = 1; + */ public long getLastFlushTime() { return lastFlushTime_; } + /** + * required uint64 lastFlushTime = 1; + */ public Builder setLastFlushTime(long value) { bitField0_ |= 0x00000001; lastFlushTime_ = value; onChanged(); return this; } + /** + * required uint64 lastFlushTime = 1; + */ public Builder clearLastFlushTime() { bitField0_ = (bitField0_ & ~0x00000001); lastFlushTime_ = 0L; onChanged(); return this; } - + // optional bool flushed = 2; private boolean flushed_ ; + /** + * optional bool flushed = 2; + */ public boolean hasFlushed() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional bool flushed = 2; + */ public boolean getFlushed() { return flushed_; } + /** + * optional bool flushed = 2; + */ public Builder setFlushed(boolean value) { bitField0_ |= 0x00000002; flushed_ = value; onChanged(); return this; } + /** + * optional bool flushed = 2; + */ public Builder clearFlushed() { bitField0_ = (bitField0_ & ~0x00000002); flushed_ = false; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:FlushRegionResponse) } - + static { defaultInstance = new FlushRegionResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:FlushRegionResponse) } - + public interface SplitRegionRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .RegionSpecifier region = 1; + /** + * required .RegionSpecifier region = 1; + */ boolean hasRegion(); + /** + * required .RegionSpecifier region = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + /** + * required .RegionSpecifier region = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - + // optional bytes splitPoint = 2; + /** + * optional bytes splitPoint = 2; + */ boolean hasSplitPoint(); + /** + * optional bytes splitPoint = 2; + */ com.google.protobuf.ByteString getSplitPoint(); } + /** + * Protobuf type {@code SplitRegionRequest} + * + 
+   * <pre>
+   **
+   * Splits the specified region.
+   * <p>
+   * This method currently flushes the region and then forces a compaction which
+   * will then trigger a split.  The flush is done synchronously but the
+   * compaction is asynchronous.
+   * </pre>
+ */ public static final class SplitRegionRequest extends com.google.protobuf.GeneratedMessage implements SplitRegionRequestOrBuilder { // Use SplitRegionRequest.newBuilder() to construct. - private SplitRegionRequest(Builder builder) { + private SplitRegionRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private SplitRegionRequest(boolean noInit) {} - + private SplitRegionRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final SplitRegionRequest defaultInstance; public static SplitRegionRequest getDefaultInstance() { return defaultInstance; } - + public SplitRegionRequest getDefaultInstanceForType() { return defaultInstance; } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionRequest_fieldAccessorTable; - } - - private int bitField0_; - // required .RegionSpecifier region = 1; - public static final int REGION_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; } - - // optional bytes splitPoint = 2; - public static final int SPLITPOINT_FIELD_NUMBER = 2; - private com.google.protobuf.ByteString splitPoint_; - public boolean hasSplitPoint() { + private SplitRegionRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = region_.toBuilder(); + } + region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(region_); + region_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + case 18: { + bitField0_ |= 0x00000002; + splitPoint_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + 
e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public SplitRegionRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new SplitRegionRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // required .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + /** + * required .RegionSpecifier region = 1; + */ + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required .RegionSpecifier region = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + /** + * required .RegionSpecifier region = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + // optional bytes splitPoint = 2; + public static final int SPLITPOINT_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString splitPoint_; + /** + * optional bytes splitPoint = 2; + */ + public boolean hasSplitPoint() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional bytes splitPoint = 2; + */ public com.google.protobuf.ByteString getSplitPoint() { return splitPoint_; } - + private void initFields() { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); splitPoint_ = com.google.protobuf.ByteString.EMPTY; @@ -6906,7 +8519,7 @@ public final class AdminProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasRegion()) { memoizedIsInitialized = 0; return false; @@ -6918,7 +8531,7 @@ public final class AdminProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -6930,12 +8543,12 @@ public final class AdminProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -6949,14 +8562,14 @@ public final class AdminProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; 
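// For reference, the literal cases in the message constructor's tag switch
// above follow directly from the protobuf wire format: a tag is
// (fieldNumber << 3) | wireType, with wire type 2 for length-delimited
// fields (messages, bytes) and 0 for varints. A self-contained check:
public final class WireTagSketch {
  public static void main(String[] args) {
    System.out.println((1 << 3) | 2); // 10: field 1, length-delimited -> region message
    System.out.println((2 << 3) | 2); // 18: field 2, length-delimited -> splitPoint bytes
    System.out.println((2 << 3) | 0); // 16: field 2, varint -> ifOlderThanTs / flushed
  }
}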
@java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -6966,7 +8579,7 @@ public final class AdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest) obj; - + boolean result = true; result = result && (hasRegion() == other.hasRegion()); if (hasRegion()) { @@ -6982,9 +8595,13 @@ public final class AdminProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasRegion()) { @@ -6996,89 +8613,88 @@ public final class AdminProtos { hash = (53 * hash) + getSplitPoint().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code SplitRegionRequest} + * + *
+     * <pre>
+     **
+     * Splits the specified region.
+     * <p>
+     * This method currently flushes the region and then forces a compaction which
+     * will then trigger a split.  The flush is done synchronously but the
+     * compaction is asynchronous.
+     * </pre>
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequestOrBuilder { @@ -7086,18 +8702,21 @@ public final class AdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -7109,7 +8728,7 @@ public final class AdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (regionBuilder_ == null) { @@ -7122,20 +8741,20 @@ public final class AdminProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest build() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest result = buildPartial(); if (!result.isInitialized()) { @@ -7143,17 +8762,7 @@ public final class AdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest(this); int from_bitField0_ = bitField0_; @@ -7174,7 +8783,7 @@ public final class AdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest)other); @@ -7183,7 +8792,7 @@ public final 
class AdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.getDefaultInstance()) return this; if (other.hasRegion()) { @@ -7195,7 +8804,7 @@ public final class AdminProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasRegion()) { @@ -7207,57 +8816,39 @@ public final class AdminProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegion(subBuilder.buildPartial()); - break; - } - case 18: { - bitField0_ |= 0x00000002; - splitPoint_ = input.readBytes(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .RegionSpecifier region = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + /** + * required .RegionSpecifier region = 1; + */ public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { return region_; @@ -7265,6 +8856,9 @@ public final class AdminProtos { return regionBuilder_.getMessage(); } } + /** + * required .RegionSpecifier region = 1; + */ public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (value == null) { @@ -7278,6 +8872,9 @@ public final class AdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier region = 1; + */ public Builder setRegion( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder 
builderForValue) { if (regionBuilder_ == null) { @@ -7289,6 +8886,9 @@ public final class AdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier region = 1; + */ public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -7305,6 +8905,9 @@ public final class AdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier region = 1; + */ public Builder clearRegion() { if (regionBuilder_ == null) { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); @@ -7315,11 +8918,17 @@ public final class AdminProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { bitField0_ |= 0x00000001; onChanged(); return getRegionFieldBuilder().getBuilder(); } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); @@ -7327,6 +8936,9 @@ public final class AdminProtos { return region_; } } + /** + * required .RegionSpecifier region = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { @@ -7340,15 +8952,24 @@ public final class AdminProtos { } return regionBuilder_; } - + // optional bytes splitPoint = 2; private com.google.protobuf.ByteString splitPoint_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes splitPoint = 2; + */ public boolean hasSplitPoint() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional bytes splitPoint = 2; + */ public com.google.protobuf.ByteString getSplitPoint() { return splitPoint_; } + /** + * optional bytes splitPoint = 2; + */ public Builder setSplitPoint(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -7358,90 +8979,154 @@ public final class AdminProtos { onChanged(); return this; } + /** + * optional bytes splitPoint = 2; + */ public Builder clearSplitPoint() { bitField0_ = (bitField0_ & ~0x00000002); splitPoint_ = getDefaultInstance().getSplitPoint(); onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:SplitRegionRequest) } - + static { defaultInstance = new SplitRegionRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:SplitRegionRequest) } - + public interface SplitRegionResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code SplitRegionResponse} + */ public static final class SplitRegionResponse extends com.google.protobuf.GeneratedMessage implements SplitRegionResponseOrBuilder { // Use SplitRegionResponse.newBuilder() to construct. 
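// The parseDelimitedFrom rewrites above drop the explicit
// mergeDelimitedFrom/buildParsed dance; this sketch assumes the 2.5 Parser
// keeps the old contract of returning null at end of stream, so a
// hypothetical reader loop over length-delimited messages still terminates.
import java.io.IOException;
import java.io.InputStream;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse;

final class DelimitedReadSketch {
  static int countMessages(InputStream in) throws IOException {
    int count = 0;
    while (SplitRegionResponse.parseDelimitedFrom(in) != null) {
      count++;  // each iteration consumed one varint-length-prefixed message
    }
    return count;
  }
}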
- private SplitRegionResponse(Builder builder) { + private SplitRegionResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private SplitRegionResponse(boolean noInit) {} - + private SplitRegionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final SplitRegionResponse defaultInstance; public static SplitRegionResponse getDefaultInstance() { return defaultInstance; } - + public SplitRegionResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private SplitRegionResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public SplitRegionResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new SplitRegionResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size 
= 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -7451,101 +9136,95 @@ public final class AdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse) obj; - + boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = 
newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code SplitRegionResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponseOrBuilder { @@ -7553,18 +9232,21 @@ public final class AdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -7575,25 +9257,25 @@ public final class AdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse build() { 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse result = buildPartial(); if (!result.isInitialized()) { @@ -7601,23 +9283,13 @@ public final class AdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse(this); onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse)other); @@ -7626,134 +9298,258 @@ public final class AdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - - + // @@protoc_insertion_point(builder_scope:SplitRegionResponse) } - + static { defaultInstance = new SplitRegionResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:SplitRegionResponse) } - + public interface CompactRegionRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .RegionSpecifier region = 1; + /** + * required .RegionSpecifier region = 1; + */ boolean hasRegion(); + /** + * required .RegionSpecifier region = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + /** + * required .RegionSpecifier region = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - + // optional bool major = 2; + /** + * optional bool major = 2; + */ boolean 
hasMajor(); + /** + * optional bool major = 2; + */ boolean getMajor(); - + // optional bytes family = 3; + /** + * optional bytes family = 3; + */ boolean hasFamily(); + /** + * optional bytes family = 3; + */ com.google.protobuf.ByteString getFamily(); } + /** + * Protobuf type {@code CompactRegionRequest} + * + *
+   * <pre>
+   **
+   * Compacts the specified region.  Performs a major compaction if specified.
+   * &lt;p&gt;
+   * This method is asynchronous.
+   * </pre>
+ */ public static final class CompactRegionRequest extends com.google.protobuf.GeneratedMessage implements CompactRegionRequestOrBuilder { // Use CompactRegionRequest.newBuilder() to construct. - private CompactRegionRequest(Builder builder) { + private CompactRegionRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private CompactRegionRequest(boolean noInit) {} - + private CompactRegionRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final CompactRegionRequest defaultInstance; public static CompactRegionRequest getDefaultInstance() { return defaultInstance; } - + public CompactRegionRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private CompactRegionRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = region_.toBuilder(); + } + region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(region_); + region_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + case 16: { + bitField0_ |= 0x00000002; + major_ = input.readBool(); + break; + } + case 26: { + bitField0_ |= 0x00000004; + family_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.Builder.class); } - + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public CompactRegionRequest parsePartialFrom( + 
com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CompactRegionRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + private int bitField0_; // required .RegionSpecifier region = 1; public static final int REGION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + /** + * required .RegionSpecifier region = 1; + */ public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { return region_; } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { return region_; } - + // optional bool major = 2; public static final int MAJOR_FIELD_NUMBER = 2; private boolean major_; + /** + * optional bool major = 2; + */ public boolean hasMajor() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional bool major = 2; + */ public boolean getMajor() { return major_; } - + // optional bytes family = 3; public static final int FAMILY_FIELD_NUMBER = 3; private com.google.protobuf.ByteString family_; + /** + * optional bytes family = 3; + */ public boolean hasFamily() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional bytes family = 3; + */ public com.google.protobuf.ByteString getFamily() { return family_; } - + private void initFields() { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); major_ = false; @@ -7763,7 +9559,7 @@ public final class AdminProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasRegion()) { memoizedIsInitialized = 0; return false; @@ -7775,7 +9571,7 @@ public final class AdminProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -7790,12 +9586,12 @@ public final class AdminProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -7813,14 +9609,14 @@ public final class AdminProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -7830,7 +9626,7 @@ public final class AdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest) obj; - + boolean result = true; result = result && (hasRegion() == other.hasRegion()); if (hasRegion()) { @@ -7851,9 +9647,13 @@ public final class AdminProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int 
memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasRegion()) { @@ -7869,89 +9669,86 @@ public final class AdminProtos { hash = (53 * hash) + getFamily().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code CompactRegionRequest} + * + *
+     * <pre>
+     **
+     * Compacts the specified region.  Performs a major compaction if specified.
+     * &lt;p&gt;
+     * This method is asynchronous.
+     * </pre>
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequestOrBuilder { @@ -7959,18 +9756,21 @@ public final class AdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -7982,7 +9782,7 @@ public final class AdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (regionBuilder_ == null) { @@ -7997,20 +9797,20 @@ public final class AdminProtos { bitField0_ = (bitField0_ & ~0x00000004); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest build() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest result = buildPartial(); if (!result.isInitialized()) { @@ -8018,17 +9818,7 @@ public final class AdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest(this); int from_bitField0_ = bitField0_; @@ -8053,7 +9843,7 @@ public final class AdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest) { return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest)other); @@ -8062,7 +9852,7 @@ public final class AdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.getDefaultInstance()) return this; if (other.hasRegion()) { @@ -8077,7 +9867,7 @@ public final class AdminProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasRegion()) { @@ -8089,62 +9879,39 @@ public final class AdminProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegion(subBuilder.buildPartial()); - break; - } - case 16: { - bitField0_ |= 0x00000002; - major_ = input.readBool(); - break; - } - case 26: { - bitField0_ |= 0x00000004; - family_ = input.readBytes(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .RegionSpecifier region = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + /** + * required .RegionSpecifier region = 1; + */ public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { return region_; @@ -8152,6 +9919,9 @@ public final class AdminProtos { return regionBuilder_.getMessage(); } } + /** + * required .RegionSpecifier region = 1; + */ public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (value == null) { @@ -8165,6 +9935,9 @@ public final class 
AdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier region = 1; + */ public Builder setRegion( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { if (regionBuilder_ == null) { @@ -8176,6 +9949,9 @@ public final class AdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier region = 1; + */ public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -8192,6 +9968,9 @@ public final class AdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier region = 1; + */ public Builder clearRegion() { if (regionBuilder_ == null) { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); @@ -8202,11 +9981,17 @@ public final class AdminProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { bitField0_ |= 0x00000001; onChanged(); return getRegionFieldBuilder().getBuilder(); } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); @@ -8214,6 +9999,9 @@ public final class AdminProtos { return region_; } } + /** + * required .RegionSpecifier region = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { @@ -8227,36 +10015,57 @@ public final class AdminProtos { } return regionBuilder_; } - + // optional bool major = 2; private boolean major_ ; + /** + * optional bool major = 2; + */ public boolean hasMajor() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional bool major = 2; + */ public boolean getMajor() { return major_; } + /** + * optional bool major = 2; + */ public Builder setMajor(boolean value) { bitField0_ |= 0x00000002; major_ = value; onChanged(); return this; } + /** + * optional bool major = 2; + */ public Builder clearMajor() { bitField0_ = (bitField0_ & ~0x00000002); major_ = false; onChanged(); return this; } - + // optional bytes family = 3; private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes family = 3; + */ public boolean hasFamily() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional bytes family = 3; + */ public com.google.protobuf.ByteString getFamily() { return family_; } + /** + * optional bytes family = 3; + */ public Builder setFamily(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -8266,90 +10075,154 @@ public final class AdminProtos { onChanged(); return this; } + /** + * optional bytes family = 3; + */ public Builder clearFamily() { bitField0_ = (bitField0_ & ~0x00000004); family_ = getDefaultInstance().getFamily(); onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:CompactRegionRequest) } - + static { defaultInstance = new CompactRegionRequest(true); defaultInstance.initFields(); } - + 
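    // --- Editorial sketch, not generated by protoc. ---
    // The message now parses itself in the private
    // CompactRegionRequest(CodedInputStream, ExtensionRegistryLite) constructor,
    // reached through PARSER, and required-field validation moved from the removed
    // buildParsed() into PARSER.parseFrom(). Minimal illustration; 'serialized' is
    // a hypothetical wire-format payload.
    private static CompactRegionRequest exampleParse(byte[] serialized)
        throws com.google.protobuf.InvalidProtocolBufferException {
      // Throws InvalidProtocolBufferException when the required 'region' field is
      // absent; the exception's getUnfinishedMessage() carries the partial message.
      return PARSER.parseFrom(serialized);
    }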
// @@protoc_insertion_point(class_scope:CompactRegionRequest) } - + public interface CompactRegionResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code CompactRegionResponse} + */ public static final class CompactRegionResponse extends com.google.protobuf.GeneratedMessage implements CompactRegionResponseOrBuilder { // Use CompactRegionResponse.newBuilder() to construct. - private CompactRegionResponse(Builder builder) { + private CompactRegionResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private CompactRegionResponse(boolean noInit) {} - + private CompactRegionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final CompactRegionResponse defaultInstance; public static CompactRegionResponse getDefaultInstance() { return defaultInstance; } - + public CompactRegionResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private CompactRegionResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.Builder.class); } - + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public CompactRegionResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CompactRegionResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { 
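      // (Editorial comment.) memoizedIsInitialized caches the answer:
      // -1 = not yet computed, 1 = all required fields present, 0 = missing one.
      // CompactRegionResponse declares no fields, so this always ends up 1.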
byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -8359,101 +10232,95 @@ public final class AdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse) obj; - + boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseDelimitedFrom(java.io.InputStream input) 
throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code CompactRegionResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponseOrBuilder { @@ -8461,18 +10328,21 @@ public final class AdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -8483,25 +10353,25 @@ public final class AdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse build() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse result = buildPartial(); if (!result.isInitialized()) { @@ -8509,23 +10379,13 @@ public final class AdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse(this); onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse)other); @@ -8534,138 +10394,276 @@ public final class AdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - - + // @@protoc_insertion_point(builder_scope:CompactRegionResponse) } - + static { defaultInstance = new CompactRegionResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:CompactRegionResponse) } - + public interface 
MergeRegionsRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .RegionSpecifier regionA = 1; + /** + * required .RegionSpecifier regionA = 1; + */ boolean hasRegionA(); + /** + * required .RegionSpecifier regionA = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegionA(); + /** + * required .RegionSpecifier regionA = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionAOrBuilder(); - + // required .RegionSpecifier regionB = 2; + /** + * required .RegionSpecifier regionB = 2; + */ boolean hasRegionB(); + /** + * required .RegionSpecifier regionB = 2; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegionB(); + /** + * required .RegionSpecifier regionB = 2; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionBOrBuilder(); - + // optional bool forcible = 3 [default = false]; + /** + * optional bool forcible = 3 [default = false]; + */ boolean hasForcible(); + /** + * optional bool forcible = 3 [default = false]; + */ boolean getForcible(); } + /** + * Protobuf type {@code MergeRegionsRequest} + * + *
+   * <pre>
+   **
+   * Merges the specified regions.
+   * &lt;p&gt;
+   * This method currently closes the regions and then merges them
+   * </pre>
+ */ public static final class MergeRegionsRequest extends com.google.protobuf.GeneratedMessage implements MergeRegionsRequestOrBuilder { // Use MergeRegionsRequest.newBuilder() to construct. - private MergeRegionsRequest(Builder builder) { + private MergeRegionsRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private MergeRegionsRequest(boolean noInit) {} - + private MergeRegionsRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final MergeRegionsRequest defaultInstance; public static MergeRegionsRequest getDefaultInstance() { return defaultInstance; } - + public MergeRegionsRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private MergeRegionsRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = regionA_.toBuilder(); + } + regionA_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(regionA_); + regionA_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + subBuilder = regionB_.toBuilder(); + } + regionB_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(regionB_); + regionB_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000002; + break; + } + case 24: { + bitField0_ |= 0x00000004; + forcible_ = input.readBool(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_MergeRegionsRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_MergeRegionsRequest_fieldAccessorTable; + return 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_MergeRegionsRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public MergeRegionsRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new MergeRegionsRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required .RegionSpecifier regionA = 1; public static final int REGIONA_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier regionA_; + /** + * required .RegionSpecifier regionA = 1; + */ public boolean hasRegionA() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .RegionSpecifier regionA = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegionA() { return regionA_; } + /** + * required .RegionSpecifier regionA = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionAOrBuilder() { return regionA_; } - + // required .RegionSpecifier regionB = 2; public static final int REGIONB_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier regionB_; + /** + * required .RegionSpecifier regionB = 2; + */ public boolean hasRegionB() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required .RegionSpecifier regionB = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegionB() { return regionB_; } + /** + * required .RegionSpecifier regionB = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionBOrBuilder() { return regionB_; } - + // optional bool forcible = 3 [default = false]; public static final int FORCIBLE_FIELD_NUMBER = 3; private boolean forcible_; + /** + * optional bool forcible = 3 [default = false]; + */ public boolean hasForcible() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional bool forcible = 3 [default = false]; + */ public boolean getForcible() { return forcible_; } - + private void initFields() { regionA_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); regionB_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); @@ -8675,7 +10673,7 @@ public final class AdminProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasRegionA()) { memoizedIsInitialized = 0; return false; @@ -8695,7 +10693,7 @@ public final class AdminProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -8710,12 +10708,12 @@ public final class AdminProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 
0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -8733,14 +10731,14 @@ public final class AdminProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -8750,7 +10748,7 @@ public final class AdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest) obj; - + boolean result = true; result = result && (hasRegionA() == other.hasRegionA()); if (hasRegionA()) { @@ -8771,9 +10769,13 @@ public final class AdminProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasRegionA()) { @@ -8789,89 +10791,86 @@ public final class AdminProtos { hash = (53 * hash) + hashBoolean(getForcible()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else 
{ - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code MergeRegionsRequest} + * + *
+     * <pre>
+     **
+     * Merges the specified regions.
+     * &lt;p&gt;
+     * This method currently closes the regions and then merges them
+     * </pre>
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequestOrBuilder { @@ -8879,18 +10878,21 @@ public final class AdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_MergeRegionsRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_MergeRegionsRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_MergeRegionsRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -8903,7 +10905,7 @@ public final class AdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (regionABuilder_ == null) { @@ -8922,20 +10924,20 @@ public final class AdminProtos { bitField0_ = (bitField0_ & ~0x00000004); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_MergeRegionsRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest build() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest result = buildPartial(); if (!result.isInitialized()) { @@ -8943,17 +10945,7 @@ public final class AdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest(this); int from_bitField0_ = bitField0_; @@ -8982,7 +10974,7 @@ public final class AdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest)other); @@ 
-8991,7 +10983,7 @@ public final class AdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest.getDefaultInstance()) return this; if (other.hasRegionA()) { @@ -9006,7 +10998,7 @@ public final class AdminProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasRegionA()) { @@ -9026,66 +11018,39 @@ public final class AdminProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegionA()) { - subBuilder.mergeFrom(getRegionA()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegionA(subBuilder.buildPartial()); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegionB()) { - subBuilder.mergeFrom(getRegionB()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegionB(subBuilder.buildPartial()); - break; - } - case 24: { - bitField0_ |= 0x00000004; - forcible_ = input.readBool(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .RegionSpecifier regionA = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier regionA_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionABuilder_; + /** + * required .RegionSpecifier regionA = 1; + */ public boolean hasRegionA() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .RegionSpecifier regionA = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegionA() { if (regionABuilder_ == null) { return regionA_; @@ -9093,6 +11058,9 @@ public final class AdminProtos { return regionABuilder_.getMessage(); } } + /** + * required .RegionSpecifier regionA = 1; + */ public 
Builder setRegionA(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionABuilder_ == null) { if (value == null) { @@ -9106,6 +11074,9 @@ public final class AdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier regionA = 1; + */ public Builder setRegionA( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { if (regionABuilder_ == null) { @@ -9117,6 +11088,9 @@ public final class AdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier regionA = 1; + */ public Builder mergeRegionA(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionABuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -9133,6 +11107,9 @@ public final class AdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier regionA = 1; + */ public Builder clearRegionA() { if (regionABuilder_ == null) { regionA_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); @@ -9143,11 +11120,17 @@ public final class AdminProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * required .RegionSpecifier regionA = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionABuilder() { bitField0_ |= 0x00000001; onChanged(); return getRegionAFieldBuilder().getBuilder(); } + /** + * required .RegionSpecifier regionA = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionAOrBuilder() { if (regionABuilder_ != null) { return regionABuilder_.getMessageOrBuilder(); @@ -9155,6 +11138,9 @@ public final class AdminProtos { return regionA_; } } + /** + * required .RegionSpecifier regionA = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionAFieldBuilder() { @@ -9168,14 +11154,20 @@ public final class AdminProtos { } return regionABuilder_; } - + // required .RegionSpecifier regionB = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier regionB_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBBuilder_; + /** + * required .RegionSpecifier regionB = 2; + */ public boolean hasRegionB() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required .RegionSpecifier regionB = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegionB() { if (regionBBuilder_ == null) { return regionB_; @@ -9183,6 +11175,9 @@ public final class AdminProtos { return regionBBuilder_.getMessage(); } } + /** + * required .RegionSpecifier regionB = 2; + */ public Builder setRegionB(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBBuilder_ == null) { if (value == null) { @@ -9196,6 +11191,9 @@ public final class AdminProtos { bitField0_ |= 0x00000002; return this; } + /** + * required .RegionSpecifier regionB = 
2; + */ public Builder setRegionB( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { if (regionBBuilder_ == null) { @@ -9207,6 +11205,9 @@ public final class AdminProtos { bitField0_ |= 0x00000002; return this; } + /** + * required .RegionSpecifier regionB = 2; + */ public Builder mergeRegionB(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && @@ -9223,6 +11224,9 @@ public final class AdminProtos { bitField0_ |= 0x00000002; return this; } + /** + * required .RegionSpecifier regionB = 2; + */ public Builder clearRegionB() { if (regionBBuilder_ == null) { regionB_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); @@ -9233,11 +11237,17 @@ public final class AdminProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } + /** + * required .RegionSpecifier regionB = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBBuilder() { bitField0_ |= 0x00000002; onChanged(); return getRegionBFieldBuilder().getBuilder(); } + /** + * required .RegionSpecifier regionB = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionBOrBuilder() { if (regionBBuilder_ != null) { return regionBBuilder_.getMessageOrBuilder(); @@ -9245,6 +11255,9 @@ public final class AdminProtos { return regionB_; } } + /** + * required .RegionSpecifier regionB = 2; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionBFieldBuilder() { @@ -9258,105 +11271,178 @@ public final class AdminProtos { } return regionBBuilder_; } - + // optional bool forcible = 3 [default = false]; private boolean forcible_ ; + /** + * optional bool forcible = 3 [default = false]; + */ public boolean hasForcible() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional bool forcible = 3 [default = false]; + */ public boolean getForcible() { return forcible_; } + /** + * optional bool forcible = 3 [default = false]; + */ public Builder setForcible(boolean value) { bitField0_ |= 0x00000004; forcible_ = value; onChanged(); return this; } + /** + * optional bool forcible = 3 [default = false]; + */ public Builder clearForcible() { bitField0_ = (bitField0_ & ~0x00000004); forcible_ = false; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:MergeRegionsRequest) } - + static { defaultInstance = new MergeRegionsRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:MergeRegionsRequest) } - + public interface MergeRegionsResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code MergeRegionsResponse} + */ public static final class MergeRegionsResponse extends com.google.protobuf.GeneratedMessage implements MergeRegionsResponseOrBuilder { // Use MergeRegionsResponse.newBuilder() to construct. 
- private MergeRegionsResponse(Builder builder) { + private MergeRegionsResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private MergeRegionsResponse(boolean noInit) {} - + private MergeRegionsResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final MergeRegionsResponse defaultInstance; public static MergeRegionsResponse getDefaultInstance() { return defaultInstance; } - + public MergeRegionsResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private MergeRegionsResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_MergeRegionsResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_MergeRegionsResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_MergeRegionsResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public MergeRegionsResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new MergeRegionsResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return 
size; - + size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -9366,101 +11452,95 @@ public final class AdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse) obj; - + boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - 
Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code MergeRegionsResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponseOrBuilder { @@ -9468,18 +11548,21 @@ public final class AdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_MergeRegionsResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_MergeRegionsResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_MergeRegionsResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -9490,25 +11573,25 @@ public final class AdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_MergeRegionsResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse.getDefaultInstance(); } - + public 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse build() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse result = buildPartial(); if (!result.isInitialized()) { @@ -9516,23 +11599,13 @@ public final class AdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse(this); onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse)other); @@ -9541,116 +11614,202 @@ public final class AdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - - + // @@protoc_insertion_point(builder_scope:MergeRegionsResponse) } - + static { defaultInstance = new MergeRegionsResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:MergeRegionsResponse) } - + public interface UUIDOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required uint64 leastSigBits = 1; + /** + * required uint64 leastSigBits = 1; + */ boolean hasLeastSigBits(); + /** + * required uint64 leastSigBits = 1; + */ long getLeastSigBits(); - + // required uint64 mostSigBits = 2; + /** + * required uint64 mostSigBits = 2; + */ boolean hasMostSigBits(); + /** + * required uint64 mostSigBits = 2; + */ long getMostSigBits(); } + /** + * Protobuf type {@code 
UUID} + */ public static final class UUID extends com.google.protobuf.GeneratedMessage implements UUIDOrBuilder { // Use UUID.newBuilder() to construct. - private UUID(Builder builder) { + private UUID(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private UUID(boolean noInit) {} - + private UUID(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final UUID defaultInstance; public static UUID getDefaultInstance() { return defaultInstance; } - + public UUID getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private UUID( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + leastSigBits_ = input.readUInt64(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + mostSigBits_ = input.readUInt64(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_UUID_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_UUID_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_UUID_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public UUID parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new UUID(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required uint64 leastSigBits = 1; public static final int LEASTSIGBITS_FIELD_NUMBER = 1; private long leastSigBits_; + /** + * required uint64 leastSigBits = 1; + */ public boolean hasLeastSigBits() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required uint64 leastSigBits = 1; + */ public long getLeastSigBits() { return leastSigBits_; } - + // required uint64 mostSigBits 
= 2; public static final int MOSTSIGBITS_FIELD_NUMBER = 2; private long mostSigBits_; + /** + * required uint64 mostSigBits = 2; + */ public boolean hasMostSigBits() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required uint64 mostSigBits = 2; + */ public long getMostSigBits() { return mostSigBits_; } - + private void initFields() { leastSigBits_ = 0L; mostSigBits_ = 0L; @@ -9659,7 +11818,7 @@ public final class AdminProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasLeastSigBits()) { memoizedIsInitialized = 0; return false; @@ -9671,7 +11830,7 @@ public final class AdminProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -9683,12 +11842,12 @@ public final class AdminProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -9702,14 +11861,14 @@ public final class AdminProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -9719,7 +11878,7 @@ public final class AdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID) obj; - + boolean result = true; result = result && (hasLeastSigBits() == other.hasLeastSigBits()); if (hasLeastSigBits()) { @@ -9735,9 +11894,13 @@ public final class AdminProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasLeastSigBits()) { @@ -9749,89 +11912,79 @@ public final class AdminProtos { hash = (53 * hash) + hashLong(getMostSigBits()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code UUID} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUIDOrBuilder { @@ -9839,18 +11992,21 @@ public final class AdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_UUID_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_UUID_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_UUID_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder.class); } - + // Construct using 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -9861,7 +12017,7 @@ public final class AdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); leastSigBits_ = 0L; @@ -9870,20 +12026,20 @@ public final class AdminProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_UUID_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID build() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID result = buildPartial(); if (!result.isInitialized()) { @@ -9891,17 +12047,7 @@ public final class AdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID buildPartial() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID(this); int from_bitField0_ = bitField0_; @@ -9918,7 +12064,7 @@ public final class AdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID)other); @@ -9927,7 +12073,7 @@ public final class AdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID other) { if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDefaultInstance()) return this; if (other.hasLeastSigBits()) { @@ -9939,7 +12085,7 @@ public final class AdminProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasLeastSigBits()) { @@ -9951,246 +12097,525 @@ public final class AdminProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); 
- return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - leastSigBits_ = input.readUInt64(); - break; - } - case 16: { - bitField0_ |= 0x00000002; - mostSigBits_ = input.readUInt64(); - break; - } + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required uint64 leastSigBits = 1; private long leastSigBits_ ; + /** + * required uint64 leastSigBits = 1; + */ public boolean hasLeastSigBits() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required uint64 leastSigBits = 1; + */ public long getLeastSigBits() { return leastSigBits_; } + /** + * required uint64 leastSigBits = 1; + */ public Builder setLeastSigBits(long value) { bitField0_ |= 0x00000001; leastSigBits_ = value; onChanged(); return this; } + /** + * required uint64 leastSigBits = 1; + */ public Builder clearLeastSigBits() { bitField0_ = (bitField0_ & ~0x00000001); leastSigBits_ = 0L; onChanged(); return this; } - + // required uint64 mostSigBits = 2; private long mostSigBits_ ; + /** + * required uint64 mostSigBits = 2; + */ public boolean hasMostSigBits() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required uint64 mostSigBits = 2; + */ public long getMostSigBits() { return mostSigBits_; } + /** + * required uint64 mostSigBits = 2; + */ public Builder setMostSigBits(long value) { bitField0_ |= 0x00000002; mostSigBits_ = value; onChanged(); return this; } + /** + * required uint64 mostSigBits = 2; + */ public Builder clearMostSigBits() { bitField0_ = (bitField0_ & ~0x00000002); mostSigBits_ = 0L; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:UUID) } - + static { defaultInstance = new UUID(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:UUID) } - + public interface WALEntryOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .WALEntry.WALKey key = 1; + /** + * required .WALEntry.WALKey key = 1; + */ boolean hasKey(); + /** + * required .WALEntry.WALKey key = 1; + */ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey getKey(); + /** + * required .WALEntry.WALKey key = 1; + */ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKeyOrBuilder getKeyOrBuilder(); - + // required .WALEntry.WALEdit edit = 2; + /** + * required .WALEntry.WALEdit edit = 2; + */ boolean hasEdit(); + /** + * required .WALEntry.WALEdit edit = 2; + */ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit getEdit(); + /** + * required .WALEntry.WALEdit edit = 2; + */ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEditOrBuilder getEditOrBuilder(); } + /** + * Protobuf type {@code WALEntry} + * + *
+   * <pre>
+   * Protocol buffer version of HLog
+   * </pre>
+ */ public static final class WALEntry extends com.google.protobuf.GeneratedMessage implements WALEntryOrBuilder { // Use WALEntry.newBuilder() to construct. - private WALEntry(Builder builder) { + private WALEntry(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private WALEntry(boolean noInit) {} - + private WALEntry(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final WALEntry defaultInstance; public static WALEntry getDefaultInstance() { return defaultInstance; } - + public WALEntry getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private WALEntry( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = key_.toBuilder(); + } + key_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(key_); + key_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder subBuilder = null; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + subBuilder = edit_.toBuilder(); + } + edit_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(edit_); + edit_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000002; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new 
com.google.protobuf.AbstractParser() { + public WALEntry parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new WALEntry(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + public interface WALKeyOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required bytes encodedRegionName = 1; + /** + * required bytes encodedRegionName = 1; + */ boolean hasEncodedRegionName(); + /** + * required bytes encodedRegionName = 1; + */ com.google.protobuf.ByteString getEncodedRegionName(); - + // required bytes tableName = 2; + /** + * required bytes tableName = 2; + */ boolean hasTableName(); + /** + * required bytes tableName = 2; + */ com.google.protobuf.ByteString getTableName(); - + // required uint64 logSequenceNumber = 3; + /** + * required uint64 logSequenceNumber = 3; + */ boolean hasLogSequenceNumber(); + /** + * required uint64 logSequenceNumber = 3; + */ long getLogSequenceNumber(); - + // required uint64 writeTime = 4; + /** + * required uint64 writeTime = 4; + */ boolean hasWriteTime(); + /** + * required uint64 writeTime = 4; + */ long getWriteTime(); - + // optional .UUID clusterId = 5; + /** + * optional .UUID clusterId = 5; + */ boolean hasClusterId(); + /** + * optional .UUID clusterId = 5; + */ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID getClusterId(); + /** + * optional .UUID clusterId = 5; + */ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUIDOrBuilder getClusterIdOrBuilder(); } + /** + * Protobuf type {@code WALEntry.WALKey} + * + *
+     * <pre>
+     * Protocol buffer version of HLogKey
+     * </pre>
+ */ public static final class WALKey extends com.google.protobuf.GeneratedMessage implements WALKeyOrBuilder { // Use WALKey.newBuilder() to construct. - private WALKey(Builder builder) { + private WALKey(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private WALKey(boolean noInit) {} - + private WALKey(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final WALKey defaultInstance; public static WALKey getDefaultInstance() { return defaultInstance; } - + public WALKey getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private WALKey( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + encodedRegionName_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + tableName_ = input.readBytes(); + break; + } + case 24: { + bitField0_ |= 0x00000004; + logSequenceNumber_ = input.readUInt64(); + break; + } + case 32: { + bitField0_ |= 0x00000008; + writeTime_ = input.readUInt64(); + break; + } + case 42: { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder subBuilder = null; + if (((bitField0_ & 0x00000010) == 0x00000010)) { + subBuilder = clusterId_.toBuilder(); + } + clusterId_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(clusterId_); + clusterId_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000010; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALKey_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALKey_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALKey_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public WALKey parsePartialFrom( + 
com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new WALKey(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required bytes encodedRegionName = 1; public static final int ENCODEDREGIONNAME_FIELD_NUMBER = 1; private com.google.protobuf.ByteString encodedRegionName_; + /** + * required bytes encodedRegionName = 1; + */ public boolean hasEncodedRegionName() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes encodedRegionName = 1; + */ public com.google.protobuf.ByteString getEncodedRegionName() { return encodedRegionName_; } - + // required bytes tableName = 2; public static final int TABLENAME_FIELD_NUMBER = 2; private com.google.protobuf.ByteString tableName_; + /** + * required bytes tableName = 2; + */ public boolean hasTableName() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required bytes tableName = 2; + */ public com.google.protobuf.ByteString getTableName() { return tableName_; } - + // required uint64 logSequenceNumber = 3; public static final int LOGSEQUENCENUMBER_FIELD_NUMBER = 3; private long logSequenceNumber_; + /** + * required uint64 logSequenceNumber = 3; + */ public boolean hasLogSequenceNumber() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * required uint64 logSequenceNumber = 3; + */ public long getLogSequenceNumber() { return logSequenceNumber_; } - + // required uint64 writeTime = 4; public static final int WRITETIME_FIELD_NUMBER = 4; private long writeTime_; + /** + * required uint64 writeTime = 4; + */ public boolean hasWriteTime() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * required uint64 writeTime = 4; + */ public long getWriteTime() { return writeTime_; } - + // optional .UUID clusterId = 5; public static final int CLUSTERID_FIELD_NUMBER = 5; private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID clusterId_; + /** + * optional .UUID clusterId = 5; + */ public boolean hasClusterId() { return ((bitField0_ & 0x00000010) == 0x00000010); } + /** + * optional .UUID clusterId = 5; + */ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID getClusterId() { return clusterId_; } + /** + * optional .UUID clusterId = 5; + */ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUIDOrBuilder getClusterIdOrBuilder() { return clusterId_; } - + private void initFields() { encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; tableName_ = com.google.protobuf.ByteString.EMPTY; @@ -10202,7 +12627,7 @@ public final class AdminProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasEncodedRegionName()) { memoizedIsInitialized = 0; return false; @@ -10228,7 +12653,7 @@ public final class AdminProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -10249,12 +12674,12 @@ public final class AdminProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -10280,14 
+12705,14 @@ public final class AdminProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -10297,7 +12722,7 @@ public final class AdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey) obj; - + boolean result = true; result = result && (hasEncodedRegionName() == other.hasEncodedRegionName()); if (hasEncodedRegionName()) { @@ -10328,9 +12753,13 @@ public final class AdminProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasEncodedRegionName()) { @@ -10354,89 +12783,83 @@ public final class AdminProtos { hash = (53 * hash) + getClusterId().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code WALEntry.WALKey} + * + *
+       * <pre>
+       * Protocol buffer version of HLogKey
+       * </pre>
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKeyOrBuilder { @@ -10444,18 +12867,21 @@ public final class AdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALKey_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALKey_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALKey_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -10467,7 +12893,7 @@ public final class AdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; @@ -10486,20 +12912,20 @@ public final class AdminProtos { bitField0_ = (bitField0_ & ~0x00000010); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALKey_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey build() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey result = buildPartial(); if (!result.isInitialized()) { @@ -10507,17 +12933,7 @@ public final class AdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey buildPartial() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey(this); int from_bitField0_ = bitField0_; @@ -10550,7 +12966,7 @@ public final class AdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey)other); @@ -10559,7 +12975,7 @@ public final class AdminProtos { 
return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey other) { if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDefaultInstance()) return this; if (other.hasEncodedRegionName()) { @@ -10580,7 +12996,7 @@ public final class AdminProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasEncodedRegionName()) { @@ -10606,73 +13022,43 @@ public final class AdminProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - encodedRegionName_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - tableName_ = input.readBytes(); - break; - } - case 24: { - bitField0_ |= 0x00000004; - logSequenceNumber_ = input.readUInt64(); - break; - } - case 32: { - bitField0_ |= 0x00000008; - writeTime_ = input.readUInt64(); - break; - } - case 42: { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.newBuilder(); - if (hasClusterId()) { - subBuilder.mergeFrom(getClusterId()); - } - input.readMessage(subBuilder, extensionRegistry); - setClusterId(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required bytes encodedRegionName = 1; private com.google.protobuf.ByteString encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes encodedRegionName = 1; + */ public boolean hasEncodedRegionName() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes encodedRegionName = 1; + */ public com.google.protobuf.ByteString getEncodedRegionName() { return encodedRegionName_; } + /** + * required bytes encodedRegionName = 1; + */ public Builder setEncodedRegionName(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -10682,21 +13068,33 @@ public final class AdminProtos { onChanged(); return this; } + /** + * required bytes encodedRegionName = 1; + */ public Builder clearEncodedRegionName() { bitField0_ = (bitField0_ & ~0x00000001); encodedRegionName_ = getDefaultInstance().getEncodedRegionName(); onChanged(); return this; } - + // required bytes tableName = 2; private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes tableName = 2; + */ public boolean hasTableName() { return ((bitField0_ 
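The rewritten Builder.mergeFrom above delegates to PARSER.parsePartialFrom and, in its finally block, merges e.getUnfinishedMessage() back into the builder before rethrowing, so fields decoded before a failure are not lost. A caller-side sketch of that contract, assuming the regenerated WALKey is on the classpath; the class and helper name are illustrative:

import java.io.IOException;
import com.google.protobuf.CodedInputStream;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey;

public class PartialMergeSketch {
  // Merge wire data into an existing builder; on a truncated or corrupt
  // stream the builder still holds whatever fields were decoded, because
  // mergeFrom merged the unfinished message before the exception escaped.
  static WALKey.Builder mergeKeepingPartial(WALKey.Builder builder, byte[] data) {
    try {
      builder.mergeFrom(CodedInputStream.newInstance(data));
    } catch (IOException e) {
      // Swallowed only for this sketch; real code should propagate it.
    }
    return builder;
  }
}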
& 0x00000002) == 0x00000002); } + /** + * required bytes tableName = 2; + */ public com.google.protobuf.ByteString getTableName() { return tableName_; } + /** + * required bytes tableName = 2; + */ public Builder setTableName(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -10706,62 +13104,95 @@ public final class AdminProtos { onChanged(); return this; } + /** + * required bytes tableName = 2; + */ public Builder clearTableName() { bitField0_ = (bitField0_ & ~0x00000002); tableName_ = getDefaultInstance().getTableName(); onChanged(); return this; } - + // required uint64 logSequenceNumber = 3; private long logSequenceNumber_ ; + /** + * required uint64 logSequenceNumber = 3; + */ public boolean hasLogSequenceNumber() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * required uint64 logSequenceNumber = 3; + */ public long getLogSequenceNumber() { return logSequenceNumber_; } + /** + * required uint64 logSequenceNumber = 3; + */ public Builder setLogSequenceNumber(long value) { bitField0_ |= 0x00000004; logSequenceNumber_ = value; onChanged(); return this; } + /** + * required uint64 logSequenceNumber = 3; + */ public Builder clearLogSequenceNumber() { bitField0_ = (bitField0_ & ~0x00000004); logSequenceNumber_ = 0L; onChanged(); return this; } - + // required uint64 writeTime = 4; private long writeTime_ ; + /** + * required uint64 writeTime = 4; + */ public boolean hasWriteTime() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * required uint64 writeTime = 4; + */ public long getWriteTime() { return writeTime_; } + /** + * required uint64 writeTime = 4; + */ public Builder setWriteTime(long value) { bitField0_ |= 0x00000008; writeTime_ = value; onChanged(); return this; } + /** + * required uint64 writeTime = 4; + */ public Builder clearWriteTime() { bitField0_ = (bitField0_ & ~0x00000008); writeTime_ = 0L; onChanged(); return this; } - + // optional .UUID clusterId = 5; private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID clusterId_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUIDOrBuilder> clusterIdBuilder_; + /** + * optional .UUID clusterId = 5; + */ public boolean hasClusterId() { return ((bitField0_ & 0x00000010) == 0x00000010); } + /** + * optional .UUID clusterId = 5; + */ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID getClusterId() { if (clusterIdBuilder_ == null) { return clusterId_; @@ -10769,6 +13200,9 @@ public final class AdminProtos { return clusterIdBuilder_.getMessage(); } } + /** + * optional .UUID clusterId = 5; + */ public Builder setClusterId(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID value) { if (clusterIdBuilder_ == null) { if (value == null) { @@ -10782,6 +13216,9 @@ public final class AdminProtos { bitField0_ |= 0x00000010; return this; } + /** + * optional .UUID clusterId = 5; + */ public Builder setClusterId( org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder builderForValue) { if (clusterIdBuilder_ == null) { @@ -10793,6 +13230,9 @@ public final class AdminProtos { bitField0_ |= 0x00000010; return this; } + /** + * optional .UUID clusterId = 5; + */ public Builder mergeClusterId(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID value) { 
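Each scalar field above (tableName, logSequenceNumber, writeTime) tracks presence through a mask in the shared bitField0_ int rather than through a boxed sentinel. The same idiom in isolation, as a plain Java sketch rather than HBase code:

class PresenceBitsSketch {
  private int bitField0_;
  private long writeTime_;

  // Mirrors hasWriteTime/setWriteTime/clearWriteTime above: bit 0x08 of
  // bitField0_ records whether the field was explicitly set.
  public boolean hasWriteTime() { return ((bitField0_ & 0x00000008) == 0x00000008); }
  public void setWriteTime(long value) { bitField0_ |= 0x00000008; writeTime_ = value; }
  public void clearWriteTime() { bitField0_ = (bitField0_ & ~0x00000008); writeTime_ = 0L; }
  public long getWriteTime() { return writeTime_; }
}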
if (clusterIdBuilder_ == null) { if (((bitField0_ & 0x00000010) == 0x00000010) && @@ -10809,6 +13249,9 @@ public final class AdminProtos { bitField0_ |= 0x00000010; return this; } + /** + * optional .UUID clusterId = 5; + */ public Builder clearClusterId() { if (clusterIdBuilder_ == null) { clusterId_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDefaultInstance(); @@ -10819,11 +13262,17 @@ public final class AdminProtos { bitField0_ = (bitField0_ & ~0x00000010); return this; } + /** + * optional .UUID clusterId = 5; + */ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder getClusterIdBuilder() { bitField0_ |= 0x00000010; onChanged(); return getClusterIdFieldBuilder().getBuilder(); } + /** + * optional .UUID clusterId = 5; + */ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUIDOrBuilder getClusterIdOrBuilder() { if (clusterIdBuilder_ != null) { return clusterIdBuilder_.getMessageOrBuilder(); @@ -10831,6 +13280,9 @@ public final class AdminProtos { return clusterId_; } } + /** + * optional .UUID clusterId = 5; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUIDOrBuilder> getClusterIdFieldBuilder() { @@ -10844,76 +13296,199 @@ public final class AdminProtos { } return clusterIdBuilder_; } - + // @@protoc_insertion_point(builder_scope:WALEntry.WALKey) } - + static { defaultInstance = new WALKey(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:WALEntry.WALKey) } - + public interface WALEditOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // repeated bytes keyValueBytes = 1; + /** + * repeated bytes keyValueBytes = 1; + */ java.util.List getKeyValueBytesList(); + /** + * repeated bytes keyValueBytes = 1; + */ int getKeyValueBytesCount(); + /** + * repeated bytes keyValueBytes = 1; + */ com.google.protobuf.ByteString getKeyValueBytes(int index); - + // repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; + /** + * repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; + */ java.util.List getFamilyScopeList(); + /** + * repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; + */ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope getFamilyScope(int index); + /** + * repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; + */ int getFamilyScopeCount(); + /** + * repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; + */ java.util.List getFamilyScopeOrBuilderList(); + /** + * repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; + */ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder getFamilyScopeOrBuilder( int index); } + /** + * Protobuf type {@code WALEntry.WALEdit} + */ public static final class WALEdit extends com.google.protobuf.GeneratedMessage implements WALEditOrBuilder { // Use WALEdit.newBuilder() to construct. 
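The optional clusterId message field above is backed by a lazily created SingleFieldBuilder; requesting the sub-builder also flips the presence bit (bitField0_ |= 0x10). A caller-side sketch against the generated types; the demo method itself is illustrative:

import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey;

public class ClusterIdSketch {
  static void demo() {
    WALKey.Builder key = WALKey.newBuilder();
    // getClusterIdBuilder() materializes the SingleFieldBuilder on demand
    // and marks the field present, so hasClusterId() is true from here on.
    UUID.Builder cluster = key.getClusterIdBuilder();
    boolean present = key.hasClusterId(); // true
    key.clearClusterId();                 // resets both the bit and the value
  }
}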
- private WALEdit(Builder builder) { + private WALEdit(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private WALEdit(boolean noInit) {} - + private WALEdit(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final WALEdit defaultInstance; public static WALEdit getDefaultInstance() { return defaultInstance; } - + public WALEdit getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private WALEdit( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + keyValueBytes_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + keyValueBytes_.add(input.readBytes()); + break; + } + case 18: { + if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + familyScope_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000002; + } + familyScope_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.PARSER, extensionRegistry)); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + keyValueBytes_ = java.util.Collections.unmodifiableList(keyValueBytes_); + } + if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + familyScope_ = java.util.Collections.unmodifiableList(familyScope_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public WALEdit parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new 
WALEdit(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + + /** + * Protobuf enum {@code WALEntry.WALEdit.ScopeType} + */ public enum ScopeType implements com.google.protobuf.ProtocolMessageEnum { + /** + * REPLICATION_SCOPE_LOCAL = 0; + */ REPLICATION_SCOPE_LOCAL(0, 0), + /** + * REPLICATION_SCOPE_GLOBAL = 1; + */ REPLICATION_SCOPE_GLOBAL(1, 1), ; - + + /** + * REPLICATION_SCOPE_LOCAL = 0; + */ public static final int REPLICATION_SCOPE_LOCAL_VALUE = 0; + /** + * REPLICATION_SCOPE_GLOBAL = 1; + */ public static final int REPLICATION_SCOPE_GLOBAL_VALUE = 1; - - + + public final int getNumber() { return value; } - + public static ScopeType valueOf(int value) { switch (value) { case 0: return REPLICATION_SCOPE_LOCAL; @@ -10921,7 +13496,7 @@ public final class AdminProtos { default: return null; } } - + public static com.google.protobuf.Internal.EnumLiteMap internalGetValueMap() { return internalValueMap; @@ -10933,7 +13508,7 @@ public final class AdminProtos { return ScopeType.valueOf(number); } }; - + public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(index); @@ -10946,11 +13521,9 @@ public final class AdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDescriptor().getEnumTypes().get(0); } - - private static final ScopeType[] VALUES = { - REPLICATION_SCOPE_LOCAL, REPLICATION_SCOPE_GLOBAL, - }; - + + private static final ScopeType[] VALUES = values(); + public static ScopeType valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { @@ -10959,78 +13532,180 @@ public final class AdminProtos { } return VALUES[desc.getIndex()]; } - + private final int index; private final int value; - + private ScopeType(int index, int value) { this.index = index; this.value = value; } - + // @@protoc_insertion_point(enum_scope:WALEntry.WALEdit.ScopeType) } - + public interface FamilyScopeOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required bytes family = 1; + /** + * required bytes family = 1; + */ boolean hasFamily(); + /** + * required bytes family = 1; + */ com.google.protobuf.ByteString getFamily(); - + // required .WALEntry.WALEdit.ScopeType scopeType = 2; + /** + * required .WALEntry.WALEdit.ScopeType scopeType = 2; + */ boolean hasScopeType(); + /** + * required .WALEntry.WALEdit.ScopeType scopeType = 2; + */ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType getScopeType(); } + /** + * Protobuf type {@code WALEntry.WALEdit.FamilyScope} + */ public static final class FamilyScope extends com.google.protobuf.GeneratedMessage implements FamilyScopeOrBuilder { // Use FamilyScope.newBuilder() to construct. 
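Two regeneration patterns complete just above: WALEdit now parses inside a private (CodedInputStream, ExtensionRegistryLite) constructor, freezing its repeated fields and unknown fields in the finally block, and exposes that path through the static PARSER; ScopeType likewise derives its VALUES array from values() instead of a hand-maintained list. A round-trip sketch through the new entry point, assuming the generated classes are on the classpath:

import com.google.protobuf.ByteString;
import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit;

public class ParserRoundTripSketch {
  static WALEdit roundTrip() throws InvalidProtocolBufferException {
    byte[] wire = WALEdit.newBuilder()
        .addKeyValueBytes(ByteString.copyFromUtf8("kv"))
        .build()
        .toByteArray();
    // No intermediate Builder: PARSER drives the parse-in-constructor path.
    return WALEdit.PARSER.parseFrom(wire);
  }
}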
- private FamilyScope(Builder builder) { + private FamilyScope(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private FamilyScope(boolean noInit) {} - + private FamilyScope(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final FamilyScope defaultInstance; public static FamilyScope getDefaultInstance() { return defaultInstance; } - + public FamilyScope getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private FamilyScope( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + family_ = input.readBytes(); + break; + } + case 16: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType value = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(2, rawValue); + } else { + bitField0_ |= 0x00000002; + scopeType_ = value; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_FamilyScope_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_FamilyScope_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_FamilyScope_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public FamilyScope parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new FamilyScope(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required bytes family = 1; public static final int 
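In the FamilyScope constructor above, an unrecognized scopeType number is no longer a parse error: ScopeType.valueOf(rawValue) returns null and the raw varint is preserved via unknownFields.mergeVarintField(2, rawValue). A sketch of how a reader could detect that case, under the assumption that a newer peer sent an enum constant this runtime predates; the helper is illustrative:

import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope;

public class UnknownEnumSketch {
  // Field number 2 is scopeType; a varint parked there means the sender
  // used a ScopeType constant this (older) runtime does not know about.
  static boolean hasUnrecognizedScopeType(FamilyScope scope) {
    return !scope.getUnknownFields().getField(2).getVarintList().isEmpty();
  }
}

In that case hasScopeType() stays false, and the value still survives reserialization because the unknown field set is written back out.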
FAMILY_FIELD_NUMBER = 1; private com.google.protobuf.ByteString family_; + /** + * required bytes family = 1; + */ public boolean hasFamily() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes family = 1; + */ public com.google.protobuf.ByteString getFamily() { return family_; } - + // required .WALEntry.WALEdit.ScopeType scopeType = 2; public static final int SCOPETYPE_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType scopeType_; + /** + * required .WALEntry.WALEdit.ScopeType scopeType = 2; + */ public boolean hasScopeType() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required .WALEntry.WALEdit.ScopeType scopeType = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType getScopeType() { return scopeType_; } - + private void initFields() { family_ = com.google.protobuf.ByteString.EMPTY; scopeType_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType.REPLICATION_SCOPE_LOCAL; @@ -11039,7 +13714,7 @@ public final class AdminProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasFamily()) { memoizedIsInitialized = 0; return false; @@ -11051,7 +13726,7 @@ public final class AdminProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -11063,12 +13738,12 @@ public final class AdminProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -11082,14 +13757,14 @@ public final class AdminProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -11099,7 +13774,7 @@ public final class AdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope) obj; - + boolean result = true; result = result && (hasFamily() == other.hasFamily()); if (hasFamily()) { @@ -11115,9 +13790,13 @@ public final class AdminProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasFamily()) { @@ -11129,89 +13808,79 @@ public final class AdminProtos { hash = (53 * hash) + hashEnum(getScopeType()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static 
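hashCode above now memoizes its result, which is safe because a message is immutable once built; 0 doubles as the "not computed yet" sentinel, so a hash that genuinely equals 0 is simply recomputed each call. The idiom in isolation (plain Java sketch, equals elided):

import java.util.Arrays;

class HashMemoSketch {
  private final byte[] payload = {1, 2, 3};
  private int memoizedHashCode = 0; // 0 means "not computed yet"

  @Override public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (53 * hash) + Arrays.hashCode(payload);
    memoizedHashCode = hash; // benign race: every thread computes the same value
    return hash;
  }
}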
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( 
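Every static parseFrom/parseDelimitedFrom overload above now funnels into PARSER instead of the removed newBuilder().mergeFrom(...).buildParsed() dance; observable behavior is preserved, including parseDelimitedFrom returning null at a clean end of stream. A delimited round-trip sketch, with illustrative values:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType;

public class DelimitedSketch {
  static void demo() throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    FamilyScope.newBuilder()
        .setFamily(ByteString.copyFromUtf8("cf"))
        .setScopeType(ScopeType.REPLICATION_SCOPE_GLOBAL)
        .build()
        .writeDelimitedTo(out); // length-prefixed frame

    InputStream in = new ByteArrayInputStream(out.toByteArray());
    FamilyScope first = FamilyScope.parseDelimitedFrom(in);  // the message
    FamilyScope second = FamilyScope.parseDelimitedFrom(in); // null: clean EOF
  }
}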
com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code WALEntry.WALEdit.FamilyScope} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder { @@ -11219,18 +13888,21 @@ public final class AdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_FamilyScope_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_FamilyScope_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_FamilyScope_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -11241,7 +13913,7 @@ public final class AdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); family_ = com.google.protobuf.ByteString.EMPTY; @@ -11250,20 +13922,20 @@ public final class AdminProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_FamilyScope_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope build() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope result = buildPartial(); if (!result.isInitialized()) { @@ -11271,17 +13943,7 @@ public final class AdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope buildPartial() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope(this); int from_bitField0_ = 
bitField0_; @@ -11298,7 +13960,7 @@ public final class AdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope)other); @@ -11307,7 +13969,7 @@ public final class AdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope other) { if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.getDefaultInstance()) return this; if (other.hasFamily()) { @@ -11319,7 +13981,7 @@ public final class AdminProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasFamily()) { @@ -11331,60 +13993,43 @@ public final class AdminProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - family_ = input.readBytes(); - break; - } - case 16: { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType value = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(2, rawValue); - } else { - bitField0_ |= 0x00000002; - scopeType_ = value; - } - break; - } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required bytes family = 1; private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes family = 1; + */ public boolean hasFamily() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes family = 1; + */ public com.google.protobuf.ByteString getFamily() { return family_; } + /** + * required bytes family = 1; + */ public Builder setFamily(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -11394,21 +14039,33 @@ public final class AdminProtos { onChanged(); return this; } + /** + * required bytes family = 1; + */ public Builder clearFamily() { bitField0_ = (bitField0_ & ~0x00000001); family_ = getDefaultInstance().getFamily(); onChanged(); return this; } - + // required .WALEntry.WALEdit.ScopeType scopeType = 2; private 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType scopeType_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType.REPLICATION_SCOPE_LOCAL; + /** + * required .WALEntry.WALEdit.ScopeType scopeType = 2; + */ public boolean hasScopeType() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required .WALEntry.WALEdit.ScopeType scopeType = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType getScopeType() { return scopeType_; } + /** + * required .WALEntry.WALEdit.ScopeType scopeType = 2; + */ public Builder setScopeType(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType value) { if (value == null) { throw new NullPointerException(); @@ -11418,68 +14075,95 @@ public final class AdminProtos { onChanged(); return this; } + /** + * required .WALEntry.WALEdit.ScopeType scopeType = 2; + */ public Builder clearScopeType() { bitField0_ = (bitField0_ & ~0x00000002); scopeType_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType.REPLICATION_SCOPE_LOCAL; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:WALEntry.WALEdit.FamilyScope) } - + static { defaultInstance = new FamilyScope(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:WALEntry.WALEdit.FamilyScope) } - + // repeated bytes keyValueBytes = 1; public static final int KEYVALUEBYTES_FIELD_NUMBER = 1; private java.util.List keyValueBytes_; + /** + * repeated bytes keyValueBytes = 1; + */ public java.util.List getKeyValueBytesList() { return keyValueBytes_; } + /** + * repeated bytes keyValueBytes = 1; + */ public int getKeyValueBytesCount() { return keyValueBytes_.size(); } + /** + * repeated bytes keyValueBytes = 1; + */ public com.google.protobuf.ByteString getKeyValueBytes(int index) { return keyValueBytes_.get(index); } - + // repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; public static final int FAMILYSCOPE_FIELD_NUMBER = 2; private java.util.List familyScope_; + /** + * repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; + */ public java.util.List getFamilyScopeList() { return familyScope_; } + /** + * repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; + */ public java.util.List getFamilyScopeOrBuilderList() { return familyScope_; } + /** + * repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; + */ public int getFamilyScopeCount() { return familyScope_.size(); } + /** + * repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope getFamilyScope(int index) { return familyScope_.get(index); } + /** + * repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder getFamilyScopeOrBuilder( int index) { return familyScope_.get(index); } - + private void initFields() { - keyValueBytes_ = java.util.Collections.emptyList();; + keyValueBytes_ = java.util.Collections.emptyList(); familyScope_ = java.util.Collections.emptyList(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + for (int i = 0; i < getFamilyScopeCount(); i++) { if (!getFamilyScope(i).isInitialized()) { memoizedIsInitialized = 0; @@ -11489,7 +14173,7 @@ public final class AdminProtos { memoizedIsInitialized = 1; 
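keyValueBytes is a plain repeated bytes field, so the message side above exposes only a count, an indexed getter, and a read-only list view (the regeneration also drops the stray double semicolon in initFields). A small consumer sketch; the helper name is illustrative:

import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit;

public class RepeatedBytesSketch {
  // Sum the serialized KeyValue payload sizes; each element is an
  // immutable ByteString, so no defensive copies are needed.
  static int totalKeyValueBytes(WALEdit edit) {
    int total = 0;
    for (ByteString kv : edit.getKeyValueBytesList()) {
      total += kv.size();
    }
    return total;
  }
}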
return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -11501,12 +14185,12 @@ public final class AdminProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; { int dataSize = 0; @@ -11525,14 +14209,14 @@ public final class AdminProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -11542,7 +14226,7 @@ public final class AdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit) obj; - + boolean result = true; result = result && getKeyValueBytesList() .equals(other.getKeyValueBytesList()); @@ -11552,9 +14236,13 @@ public final class AdminProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (getKeyValueBytesCount() > 0) { @@ -11566,89 +14254,79 @@ public final class AdminProtos { hash = (53 * hash) + getFamilyScopeList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); 
+ return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code WALEntry.WALEdit} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEditOrBuilder { @@ -11656,18 +14334,21 @@ public final class AdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -11679,10 +14360,10 @@ public final class AdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); - keyValueBytes_ = java.util.Collections.emptyList();; + keyValueBytes_ = 
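The prototype-based factories above (newBuilder(prototype), toBuilder()) still copy through mergeFrom, so an existing WALEdit can be extended without mutating the original. A sketch, with an illustrative helper name:

import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit;

public class CopyOnBuildSketch {
  static WALEdit appendKeyValue(WALEdit original, ByteString kv) {
    return original.toBuilder()   // equivalent to newBuilder().mergeFrom(original)
        .addKeyValueBytes(kv)
        .build();                 // original stays immutable and unchanged
  }
}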
java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); if (familyScopeBuilder_ == null) { familyScope_ = java.util.Collections.emptyList(); @@ -11692,20 +14373,20 @@ public final class AdminProtos { } return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit build() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit result = buildPartial(); if (!result.isInitialized()) { @@ -11713,17 +14394,7 @@ public final class AdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit buildPartial() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit(this); int from_bitField0_ = bitField0_; @@ -11744,7 +14415,7 @@ public final class AdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit)other); @@ -11753,7 +14424,7 @@ public final class AdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit other) { if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDefaultInstance()) return this; if (!other.keyValueBytes_.isEmpty()) { @@ -11795,7 +14466,7 @@ public final class AdminProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { for (int i = 0; i < getFamilyScopeCount(); i++) { if (!getFamilyScope(i).isInitialized()) { @@ -11805,65 +14476,56 @@ public final class AdminProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - ensureKeyValueBytesIsMutable(); - 
keyValueBytes_.add(input.readBytes()); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addFamilyScope(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // repeated bytes keyValueBytes = 1; - private java.util.List keyValueBytes_ = java.util.Collections.emptyList();; + private java.util.List keyValueBytes_ = java.util.Collections.emptyList(); private void ensureKeyValueBytesIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { keyValueBytes_ = new java.util.ArrayList(keyValueBytes_); bitField0_ |= 0x00000001; } } + /** + * repeated bytes keyValueBytes = 1; + */ public java.util.List getKeyValueBytesList() { return java.util.Collections.unmodifiableList(keyValueBytes_); } + /** + * repeated bytes keyValueBytes = 1; + */ public int getKeyValueBytesCount() { return keyValueBytes_.size(); } + /** + * repeated bytes keyValueBytes = 1; + */ public com.google.protobuf.ByteString getKeyValueBytes(int index) { return keyValueBytes_.get(index); } + /** + * repeated bytes keyValueBytes = 1; + */ public Builder setKeyValueBytes( int index, com.google.protobuf.ByteString value) { if (value == null) { @@ -11874,6 +14536,9 @@ public final class AdminProtos { onChanged(); return this; } + /** + * repeated bytes keyValueBytes = 1; + */ public Builder addKeyValueBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -11883,6 +14548,9 @@ public final class AdminProtos { onChanged(); return this; } + /** + * repeated bytes keyValueBytes = 1; + */ public Builder addAllKeyValueBytes( java.lang.Iterable values) { ensureKeyValueBytesIsMutable(); @@ -11890,13 +14558,16 @@ public final class AdminProtos { onChanged(); return this; } + /** + * repeated bytes keyValueBytes = 1; + */ public Builder clearKeyValueBytes() { - keyValueBytes_ = java.util.Collections.emptyList();; + keyValueBytes_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } - + // repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; private java.util.List familyScope_ = java.util.Collections.emptyList(); @@ -11906,10 +14577,13 @@ public final class AdminProtos { bitField0_ |= 0x00000002; } } - + private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder> familyScopeBuilder_; - + + /** + * repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; + */ public java.util.List getFamilyScopeList() { if (familyScopeBuilder_ == null) { return java.util.Collections.unmodifiableList(familyScope_); @@ -11917,6 +14591,9 @@ public final class AdminProtos { return 
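The builder keeps keyValueBytes_ aliased to an immutable empty list until the first mutation; ensureKeyValueBytesIsMutable() then swaps in a private ArrayList exactly once, with bit 0x01 of bitField0_ recording that the copy already happened. The same copy-on-first-write idiom in isolation, as a plain Java sketch rather than the protobuf runtime:

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import com.google.protobuf.ByteString;

class CopyOnFirstWriteSketch {
  private int bitField0_;
  private List<ByteString> keyValueBytes_ = Collections.emptyList();

  private void ensureKeyValueBytesIsMutable() {
    if (!((bitField0_ & 0x00000001) == 0x00000001)) {
      keyValueBytes_ = new ArrayList<ByteString>(keyValueBytes_); // private copy
      bitField0_ |= 0x00000001;
    }
  }

  void addKeyValueBytes(ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureKeyValueBytesIsMutable();
    keyValueBytes_.add(value);
  }
}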
familyScopeBuilder_.getMessageList(); } } + /** + * repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; + */ public int getFamilyScopeCount() { if (familyScopeBuilder_ == null) { return familyScope_.size(); @@ -11924,6 +14601,9 @@ public final class AdminProtos { return familyScopeBuilder_.getCount(); } } + /** + * repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope getFamilyScope(int index) { if (familyScopeBuilder_ == null) { return familyScope_.get(index); @@ -11931,6 +14611,9 @@ public final class AdminProtos { return familyScopeBuilder_.getMessage(index); } } + /** + * repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; + */ public Builder setFamilyScope( int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope value) { if (familyScopeBuilder_ == null) { @@ -11945,6 +14628,9 @@ public final class AdminProtos { } return this; } + /** + * repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; + */ public Builder setFamilyScope( int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder builderForValue) { if (familyScopeBuilder_ == null) { @@ -11956,6 +14642,9 @@ public final class AdminProtos { } return this; } + /** + * repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; + */ public Builder addFamilyScope(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope value) { if (familyScopeBuilder_ == null) { if (value == null) { @@ -11969,6 +14658,9 @@ public final class AdminProtos { } return this; } + /** + * repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; + */ public Builder addFamilyScope( int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope value) { if (familyScopeBuilder_ == null) { @@ -11983,6 +14675,9 @@ public final class AdminProtos { } return this; } + /** + * repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; + */ public Builder addFamilyScope( org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder builderForValue) { if (familyScopeBuilder_ == null) { @@ -11994,6 +14689,9 @@ public final class AdminProtos { } return this; } + /** + * repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; + */ public Builder addFamilyScope( int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder builderForValue) { if (familyScopeBuilder_ == null) { @@ -12005,6 +14703,9 @@ public final class AdminProtos { } return this; } + /** + * repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; + */ public Builder addAllFamilyScope( java.lang.Iterable values) { if (familyScopeBuilder_ == null) { @@ -12016,6 +14717,9 @@ public final class AdminProtos { } return this; } + /** + * repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; + */ public Builder clearFamilyScope() { if (familyScopeBuilder_ == null) { familyScope_ = java.util.Collections.emptyList(); @@ -12026,6 +14730,9 @@ public final class AdminProtos { } return this; } + /** + * repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; + */ public Builder removeFamilyScope(int index) { if (familyScopeBuilder_ == null) { ensureFamilyScopeIsMutable(); @@ -12036,10 +14743,16 @@ public final class AdminProtos { } return this; } + /** + * repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder 
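familyScope is a repeated message field, so every mutator above comes in two flavors: one that appends to the plain list while familyScopeBuilder_ is still null, and one routed through the lazily created RepeatedFieldBuilder once sub-builders are in play. A sketch using the message-valued path; values are placeholders:

import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType;

public class RepeatedMessageSketch {
  static WALEdit withGlobalScope(byte[] family) {
    FamilyScope scope = FamilyScope.newBuilder()
        .setFamily(ByteString.copyFrom(family))
        .setScopeType(ScopeType.REPLICATION_SCOPE_GLOBAL)
        .build(); // both required fields set, so isInitialized() holds
    // addFamilyScope(value) appends directly until a sub-builder is
    // requested, at which point the RepeatedFieldBuilder takes over.
    return WALEdit.newBuilder().addFamilyScope(scope).build();
  }
}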
getFamilyScopeBuilder( int index) { return getFamilyScopeFieldBuilder().getBuilder(index); } + /** + * repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder getFamilyScopeOrBuilder( int index) { if (familyScopeBuilder_ == null) { @@ -12047,6 +14760,9 @@ public final class AdminProtos { return familyScopeBuilder_.getMessageOrBuilder(index); } } + /** + * repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; + */ public java.util.List getFamilyScopeOrBuilderList() { if (familyScopeBuilder_ != null) { @@ -12055,15 +14771,24 @@ public final class AdminProtos { return java.util.Collections.unmodifiableList(familyScope_); } } + /** + * repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder addFamilyScopeBuilder() { return getFamilyScopeFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.getDefaultInstance()); } + /** + * repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder addFamilyScopeBuilder( int index) { return getFamilyScopeFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.getDefaultInstance()); } + /** + * repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; + */ public java.util.List getFamilyScopeBuilderList() { return getFamilyScopeFieldBuilder().getBuilderList(); @@ -12082,45 +14807,63 @@ public final class AdminProtos { } return familyScopeBuilder_; } - + // @@protoc_insertion_point(builder_scope:WALEntry.WALEdit) } - + static { defaultInstance = new WALEdit(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:WALEntry.WALEdit) } - + private int bitField0_; // required .WALEntry.WALKey key = 1; public static final int KEY_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey key_; + /** + * required .WALEntry.WALKey key = 1; + */ public boolean hasKey() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .WALEntry.WALKey key = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey getKey() { return key_; } + /** + * required .WALEntry.WALKey key = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKeyOrBuilder getKeyOrBuilder() { return key_; } - + // required .WALEntry.WALEdit edit = 2; public static final int EDIT_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit edit_; + /** + * required .WALEntry.WALEdit edit = 2; + */ public boolean hasEdit() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required .WALEntry.WALEdit edit = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit getEdit() { return edit_; } + /** + * required .WALEntry.WALEdit edit = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEditOrBuilder getEditOrBuilder() { return edit_; } - + private void initFields() { key_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDefaultInstance(); edit_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDefaultInstance(); @@ -12129,7 +14872,7 @@ public final class AdminProtos { public final boolean 
isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasKey()) { memoizedIsInitialized = 0; return false; @@ -12149,7 +14892,7 @@ public final class AdminProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -12161,12 +14904,12 @@ public final class AdminProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -12180,14 +14923,14 @@ public final class AdminProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -12197,7 +14940,7 @@ public final class AdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry) obj; - + boolean result = true; result = result && (hasKey() == other.hasKey()); if (hasKey()) { @@ -12213,9 +14956,13 @@ public final class AdminProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasKey()) { @@ -12227,89 +14974,83 @@ public final class AdminProtos { hash = (53 * hash) + getEdit().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code WALEntry} + * + *
+     * Protocol buffer version of HLog
+     * </pre>
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder { @@ -12317,18 +15058,21 @@ public final class AdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -12341,7 +15085,7 @@ public final class AdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (keyBuilder_ == null) { @@ -12358,38 +15102,28 @@ public final class AdminProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry build() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry result = buildPartial(); if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); + throw newUninitializedMessageException(result); } return result; } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry buildPartial() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry(this); int from_bitField0_ = bitField0_; @@ -12414,7 +15148,7 @@ public final class AdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry)other); @@ -12423,7 +15157,7 @@ public final class AdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry other) { if (other 
== org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.getDefaultInstance()) return this; if (other.hasKey()) { @@ -12435,7 +15169,7 @@ public final class AdminProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasKey()) { @@ -12455,61 +15189,39 @@ public final class AdminProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.newBuilder(); - if (hasKey()) { - subBuilder.mergeFrom(getKey()); - } - input.readMessage(subBuilder, extensionRegistry); - setKey(subBuilder.buildPartial()); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.newBuilder(); - if (hasEdit()) { - subBuilder.mergeFrom(getEdit()); - } - input.readMessage(subBuilder, extensionRegistry); - setEdit(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .WALEntry.WALKey key = 1; private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey key_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKeyOrBuilder> keyBuilder_; + /** + * required .WALEntry.WALKey key = 1; + */ public boolean hasKey() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .WALEntry.WALKey key = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey getKey() { if (keyBuilder_ == null) { return key_; @@ -12517,6 +15229,9 @@ public final class AdminProtos { return keyBuilder_.getMessage(); } } + /** + * required .WALEntry.WALKey key = 1; + */ public Builder setKey(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey value) { if (keyBuilder_ == null) { if (value == null) { @@ -12530,6 +15245,9 @@ public final class AdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .WALEntry.WALKey key = 1; + */ public Builder setKey( 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder builderForValue) { if (keyBuilder_ == null) { @@ -12541,6 +15259,9 @@ public final class AdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .WALEntry.WALKey key = 1; + */ public Builder mergeKey(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey value) { if (keyBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -12557,6 +15278,9 @@ public final class AdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .WALEntry.WALKey key = 1; + */ public Builder clearKey() { if (keyBuilder_ == null) { key_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDefaultInstance(); @@ -12567,11 +15291,17 @@ public final class AdminProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * required .WALEntry.WALKey key = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder getKeyBuilder() { bitField0_ |= 0x00000001; onChanged(); return getKeyFieldBuilder().getBuilder(); } + /** + * required .WALEntry.WALKey key = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKeyOrBuilder getKeyOrBuilder() { if (keyBuilder_ != null) { return keyBuilder_.getMessageOrBuilder(); @@ -12579,6 +15309,9 @@ public final class AdminProtos { return key_; } } + /** + * required .WALEntry.WALKey key = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKeyOrBuilder> getKeyFieldBuilder() { @@ -12592,14 +15325,20 @@ public final class AdminProtos { } return keyBuilder_; } - + // required .WALEntry.WALEdit edit = 2; private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit edit_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEditOrBuilder> editBuilder_; + /** + * required .WALEntry.WALEdit edit = 2; + */ public boolean hasEdit() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required .WALEntry.WALEdit edit = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit getEdit() { if (editBuilder_ == null) { return edit_; @@ -12607,6 +15346,9 @@ public final class AdminProtos { return editBuilder_.getMessage(); } } + /** + * required .WALEntry.WALEdit edit = 2; + */ public Builder setEdit(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit value) { if (editBuilder_ == null) { if (value == null) { @@ -12620,6 +15362,9 @@ public final class AdminProtos { bitField0_ |= 0x00000002; return this; } + /** + * required .WALEntry.WALEdit edit = 2; + */ public Builder setEdit( org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder builderForValue) { if (editBuilder_ == null) { @@ -12631,6 +15376,9 @@ public final class AdminProtos { bitField0_ |= 0x00000002; return this; } + /** + * required .WALEntry.WALEdit edit = 2; + */ public Builder mergeEdit(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit value) { if 
(editBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && @@ -12647,6 +15395,9 @@ public final class AdminProtos { bitField0_ |= 0x00000002; return this; } + /** + * required .WALEntry.WALEdit edit = 2; + */ public Builder clearEdit() { if (editBuilder_ == null) { edit_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDefaultInstance(); @@ -12657,11 +15408,17 @@ public final class AdminProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } + /** + * required .WALEntry.WALEdit edit = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder getEditBuilder() { bitField0_ |= 0x00000002; onChanged(); return getEditFieldBuilder().getBuilder(); } + /** + * required .WALEntry.WALEdit edit = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEditOrBuilder getEditOrBuilder() { if (editBuilder_ != null) { return editBuilder_.getMessageOrBuilder(); @@ -12669,6 +15426,9 @@ public final class AdminProtos { return edit_; } } + /** + * required .WALEntry.WALEdit edit = 2; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEditOrBuilder> getEditFieldBuilder() { @@ -12682,80 +15442,191 @@ public final class AdminProtos { } return editBuilder_; } - + // @@protoc_insertion_point(builder_scope:WALEntry) } - + static { defaultInstance = new WALEntry(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:WALEntry) } - + public interface ReplicateWALEntryRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // repeated .WALEntry entry = 1; + /** + * repeated .WALEntry entry = 1; + */ java.util.List getEntryList(); + /** + * repeated .WALEntry entry = 1; + */ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry getEntry(int index); + /** + * repeated .WALEntry entry = 1; + */ int getEntryCount(); + /** + * repeated .WALEntry entry = 1; + */ java.util.List getEntryOrBuilderList(); + /** + * repeated .WALEntry entry = 1; + */ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder getEntryOrBuilder( int index); } + /** + * Protobuf type {@code ReplicateWALEntryRequest} + * + *
+   **
+   * Replicates the given entries. The guarantee is that the given entries
+   * will be durable on the slave cluster if this method returns without
+   * any exception.
+   * hbase.replication has to be set to true for this to work.
+   * </pre>
+ */ public static final class ReplicateWALEntryRequest extends com.google.protobuf.GeneratedMessage implements ReplicateWALEntryRequestOrBuilder { // Use ReplicateWALEntryRequest.newBuilder() to construct. - private ReplicateWALEntryRequest(Builder builder) { + private ReplicateWALEntryRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private ReplicateWALEntryRequest(boolean noInit) {} - + private ReplicateWALEntryRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final ReplicateWALEntryRequest defaultInstance; public static ReplicateWALEntryRequest getDefaultInstance() { return defaultInstance; } - + public ReplicateWALEntryRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ReplicateWALEntryRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + entry_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + entry_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.PARSER, extensionRegistry)); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + entry_ = java.util.Collections.unmodifiableList(entry_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public ReplicateWALEntryRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new 
ReplicateWALEntryRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + // repeated .WALEntry entry = 1; public static final int ENTRY_FIELD_NUMBER = 1; private java.util.List entry_; + /** + * repeated .WALEntry entry = 1; + */ public java.util.List getEntryList() { return entry_; } + /** + * repeated .WALEntry entry = 1; + */ public java.util.List getEntryOrBuilderList() { return entry_; } + /** + * repeated .WALEntry entry = 1; + */ public int getEntryCount() { return entry_.size(); } + /** + * repeated .WALEntry entry = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry getEntry(int index) { return entry_.get(index); } + /** + * repeated .WALEntry entry = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder getEntryOrBuilder( int index) { return entry_.get(index); } - + private void initFields() { entry_ = java.util.Collections.emptyList(); } @@ -12763,7 +15634,7 @@ public final class AdminProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + for (int i = 0; i < getEntryCount(); i++) { if (!getEntry(i).isInitialized()) { memoizedIsInitialized = 0; @@ -12773,7 +15644,7 @@ public final class AdminProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -12782,12 +15653,12 @@ public final class AdminProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; for (int i = 0; i < entry_.size(); i++) { size += com.google.protobuf.CodedOutputStream @@ -12797,14 +15668,14 @@ public final class AdminProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -12814,7 +15685,7 @@ public final class AdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest) obj; - + boolean result = true; result = result && getEntryList() .equals(other.getEntryList()); @@ -12822,9 +15693,13 @@ public final class AdminProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (getEntryCount() > 0) { @@ -12832,89 +15707,87 @@ public final class AdminProtos { hash = (53 * hash) + getEntryList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = 
new Builder(parent); return builder; } + /** + * Protobuf type {@code ReplicateWALEntryRequest} + * + *
+     **
+     * Replicates the given entries. The guarantee is that the given entries
+     * will be durable on the slave cluster if this method returns without
+     * any exception.
+     * hbase.replication has to be set to true for this to work.
+     * </pre>
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequestOrBuilder { @@ -12922,18 +15795,21 @@ public final class AdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -12945,7 +15821,7 @@ public final class AdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (entryBuilder_ == null) { @@ -12956,20 +15832,20 @@ public final class AdminProtos { } return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest build() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest result = buildPartial(); if (!result.isInitialized()) { @@ -12977,17 +15853,7 @@ public final class AdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest(this); int from_bitField0_ = bitField0_; @@ -13003,7 +15869,7 @@ public final class AdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest) { return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest)other); @@ -13012,7 +15878,7 @@ public final class AdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.getDefaultInstance()) return this; if (entryBuilder_ == null) { @@ -13044,7 +15910,7 @@ public final class AdminProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { for (int i = 0; i < getEntryCount(); i++) { if (!getEntry(i).isInitialized()) { @@ -13054,42 +15920,26 @@ public final class AdminProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addEntry(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // repeated .WALEntry entry = 1; private java.util.List entry_ = java.util.Collections.emptyList(); @@ -13099,10 +15949,13 @@ public final class AdminProtos { bitField0_ |= 0x00000001; } } - + private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder> entryBuilder_; - + + /** + * repeated .WALEntry entry = 1; + */ public java.util.List getEntryList() { if (entryBuilder_ == null) { return java.util.Collections.unmodifiableList(entry_); @@ -13110,6 +15963,9 @@ public final class AdminProtos { return entryBuilder_.getMessageList(); } } + /** + * repeated .WALEntry entry = 1; + */ public int getEntryCount() { if (entryBuilder_ == null) { return entry_.size(); @@ -13117,6 +15973,9 @@ public final class AdminProtos { return entryBuilder_.getCount(); } } + /** + * repeated .WALEntry entry = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry getEntry(int index) { if (entryBuilder_ == null) { return entry_.get(index); @@ -13124,6 +15983,9 @@ public final class AdminProtos { return entryBuilder_.getMessage(index); } } + /** + * repeated .WALEntry entry = 1; + */ public Builder setEntry( int index, 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry value) { if (entryBuilder_ == null) { @@ -13138,6 +16000,9 @@ public final class AdminProtos { } return this; } + /** + * repeated .WALEntry entry = 1; + */ public Builder setEntry( int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder builderForValue) { if (entryBuilder_ == null) { @@ -13149,6 +16014,9 @@ public final class AdminProtos { } return this; } + /** + * repeated .WALEntry entry = 1; + */ public Builder addEntry(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry value) { if (entryBuilder_ == null) { if (value == null) { @@ -13162,6 +16030,9 @@ public final class AdminProtos { } return this; } + /** + * repeated .WALEntry entry = 1; + */ public Builder addEntry( int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry value) { if (entryBuilder_ == null) { @@ -13176,6 +16047,9 @@ public final class AdminProtos { } return this; } + /** + * repeated .WALEntry entry = 1; + */ public Builder addEntry( org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder builderForValue) { if (entryBuilder_ == null) { @@ -13187,6 +16061,9 @@ public final class AdminProtos { } return this; } + /** + * repeated .WALEntry entry = 1; + */ public Builder addEntry( int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder builderForValue) { if (entryBuilder_ == null) { @@ -13198,6 +16075,9 @@ public final class AdminProtos { } return this; } + /** + * repeated .WALEntry entry = 1; + */ public Builder addAllEntry( java.lang.Iterable values) { if (entryBuilder_ == null) { @@ -13209,6 +16089,9 @@ public final class AdminProtos { } return this; } + /** + * repeated .WALEntry entry = 1; + */ public Builder clearEntry() { if (entryBuilder_ == null) { entry_ = java.util.Collections.emptyList(); @@ -13219,6 +16102,9 @@ public final class AdminProtos { } return this; } + /** + * repeated .WALEntry entry = 1; + */ public Builder removeEntry(int index) { if (entryBuilder_ == null) { ensureEntryIsMutable(); @@ -13229,10 +16115,16 @@ public final class AdminProtos { } return this; } + /** + * repeated .WALEntry entry = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder getEntryBuilder( int index) { return getEntryFieldBuilder().getBuilder(index); } + /** + * repeated .WALEntry entry = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder getEntryOrBuilder( int index) { if (entryBuilder_ == null) { @@ -13240,6 +16132,9 @@ public final class AdminProtos { return entryBuilder_.getMessageOrBuilder(index); } } + /** + * repeated .WALEntry entry = 1; + */ public java.util.List getEntryOrBuilderList() { if (entryBuilder_ != null) { @@ -13248,15 +16143,24 @@ public final class AdminProtos { return java.util.Collections.unmodifiableList(entry_); } } + /** + * repeated .WALEntry entry = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder addEntryBuilder() { return getEntryFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.getDefaultInstance()); } + /** + * repeated .WALEntry entry = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder addEntryBuilder( int index) { return getEntryFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.getDefaultInstance()); } + /** + * repeated .WALEntry entry = 1; + */ public java.util.List getEntryBuilderList() { 
return getEntryFieldBuilder().getBuilderList(); @@ -13275,84 +16179,145 @@ public final class AdminProtos { } return entryBuilder_; } - + // @@protoc_insertion_point(builder_scope:ReplicateWALEntryRequest) } - + static { defaultInstance = new ReplicateWALEntryRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:ReplicateWALEntryRequest) } - + public interface ReplicateWALEntryResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code ReplicateWALEntryResponse} + */ public static final class ReplicateWALEntryResponse extends com.google.protobuf.GeneratedMessage implements ReplicateWALEntryResponseOrBuilder { // Use ReplicateWALEntryResponse.newBuilder() to construct. - private ReplicateWALEntryResponse(Builder builder) { + private ReplicateWALEntryResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private ReplicateWALEntryResponse(boolean noInit) {} - + private ReplicateWALEntryResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final ReplicateWALEntryResponse defaultInstance; public static ReplicateWALEntryResponse getDefaultInstance() { return defaultInstance; } - + public ReplicateWALEntryResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ReplicateWALEntryResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public ReplicateWALEntryResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ReplicateWALEntryResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -13362,101 +16327,95 @@ public final class AdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse) obj; - + boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code ReplicateWALEntryResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponseOrBuilder { @@ -13464,18 +16423,21 @@ public final class AdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private 
Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -13486,25 +16448,25 @@ public final class AdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse build() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse result = buildPartial(); if (!result.isInitialized()) { @@ -13512,23 +16474,13 @@ public final class AdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse(this); onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse)other); @@ -13537,122 +16489,173 @@ public final class AdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, 
extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - - + // @@protoc_insertion_point(builder_scope:ReplicateWALEntryResponse) } - + static { defaultInstance = new ReplicateWALEntryResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:ReplicateWALEntryResponse) } - + public interface RollWALWriterRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code RollWALWriterRequest} + */ public static final class RollWALWriterRequest extends com.google.protobuf.GeneratedMessage implements RollWALWriterRequestOrBuilder { // Use RollWALWriterRequest.newBuilder() to construct. - private RollWALWriterRequest(Builder builder) { + private RollWALWriterRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private RollWALWriterRequest(boolean noInit) {} - + private RollWALWriterRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final RollWALWriterRequest defaultInstance; public static RollWALWriterRequest getDefaultInstance() { return defaultInstance; } - + public RollWALWriterRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private RollWALWriterRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public 
RollWALWriterRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RollWALWriterRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -13662,101 +16665,95 @@ public final class AdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest) obj; - + boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } 
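The pattern above repeats for every generated message in this patch: the per-call newBuilder().mergeFrom(...).buildParsed() chain is replaced by a static PARSER that each parseFrom overload delegates to. A minimal sketch of what a caller sees, assuming only the generated RollWALWriterRequest class from this file (the wire bytes here are illustrative, produced by round-tripping the default instance):

import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest;

public class ParserSketch {
  public static void main(String[] args) {
    // Serialize the default (empty) message to obtain valid wire bytes.
    byte[] wire = RollWALWriterRequest.getDefaultInstance().toByteArray();
    try {
      // Same public entry point as before; internally it now delegates to
      // PARSER.parseFrom(wire) instead of newBuilder().mergeFrom(wire).buildParsed().
      RollWALWriterRequest req = RollWALWriterRequest.parseFrom(wire);
      System.out.println("parsed OK, serialized size = " + req.getSerializedSize());
    } catch (InvalidProtocolBufferException e) {
      // Malformed input still surfaces as InvalidProtocolBufferException; in
      // protobuf 2.5 the partially-parsed message is also reachable via
      // e.getUnfinishedMessage(), as the mergeFrom overrides in this diff show.
      e.printStackTrace();
    }
  }
}

The caller-visible contract is unchanged; the Parser object simply centralizes the parse loop that used to be duplicated in every Builder.mergeFrom(CodedInputStream, ExtensionRegistryLite) override.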
public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code RollWALWriterRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequestOrBuilder { @@ -13764,18 +16761,21 @@ public final class AdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + 
com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -13786,25 +16786,25 @@ public final class AdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest build() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest result = buildPartial(); if (!result.isInitialized()) { @@ -13812,23 +16812,13 @@ public final class AdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest(this); onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest)other); @@ -13837,118 +16827,223 @@ public final class AdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = 
(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - - + // @@protoc_insertion_point(builder_scope:RollWALWriterRequest) } - + static { defaultInstance = new RollWALWriterRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:RollWALWriterRequest) } - + public interface RollWALWriterResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // repeated bytes regionToFlush = 1; + /** + * repeated bytes regionToFlush = 1; + * + *
+     * A list of encoded names of regions to flush
+     * 
+ */ java.util.List getRegionToFlushList(); + /** + * repeated bytes regionToFlush = 1; + * + *
+     * A list of encoded names of regions to flush
+     * 
+ */ int getRegionToFlushCount(); + /** + * repeated bytes regionToFlush = 1; + * + *
+     * A list of encoded names of regions to flush
+     * 
+ */ com.google.protobuf.ByteString getRegionToFlush(int index); } + /** + * Protobuf type {@code RollWALWriterResponse} + */ public static final class RollWALWriterResponse extends com.google.protobuf.GeneratedMessage implements RollWALWriterResponseOrBuilder { // Use RollWALWriterResponse.newBuilder() to construct. - private RollWALWriterResponse(Builder builder) { + private RollWALWriterResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private RollWALWriterResponse(boolean noInit) {} - + private RollWALWriterResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final RollWALWriterResponse defaultInstance; public static RollWALWriterResponse getDefaultInstance() { return defaultInstance; } - + public RollWALWriterResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private RollWALWriterResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + regionToFlush_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + regionToFlush_.add(input.readBytes()); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + regionToFlush_ = java.util.Collections.unmodifiableList(regionToFlush_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public RollWALWriterResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + 
return new RollWALWriterResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + // repeated bytes regionToFlush = 1; public static final int REGIONTOFLUSH_FIELD_NUMBER = 1; private java.util.List regionToFlush_; + /** + * repeated bytes regionToFlush = 1; + * + *
+     * A list of encoded names of regions to flush
+     * 
+ */ public java.util.List getRegionToFlushList() { return regionToFlush_; } + /** + * repeated bytes regionToFlush = 1; + * + *
+     * A list of encoded names of regions to flush
+     * 
+ */ public int getRegionToFlushCount() { return regionToFlush_.size(); } + /** + * repeated bytes regionToFlush = 1; + * + *
+     * A list of encoded names of regions to flush
+     * 
+ */ public com.google.protobuf.ByteString getRegionToFlush(int index) { return regionToFlush_.get(index); } - + private void initFields() { - regionToFlush_ = java.util.Collections.emptyList();; + regionToFlush_ = java.util.Collections.emptyList(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -13957,12 +17052,12 @@ public final class AdminProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; { int dataSize = 0; @@ -13977,14 +17072,14 @@ public final class AdminProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -13994,7 +17089,7 @@ public final class AdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse) obj; - + boolean result = true; result = result && getRegionToFlushList() .equals(other.getRegionToFlushList()); @@ -14002,9 +17097,13 @@ public final class AdminProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (getRegionToFlushCount() > 0) { @@ -14012,89 +17111,79 @@ public final class AdminProtos { hash = (53 * hash) + getRegionToFlushList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code RollWALWriterResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponseOrBuilder { @@ -14102,18 +17191,21 @@ public final class AdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.Builder.class); } - + // 
Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -14124,27 +17216,27 @@ public final class AdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); - regionToFlush_ = java.util.Collections.emptyList();; + regionToFlush_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse build() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse result = buildPartial(); if (!result.isInitialized()) { @@ -14152,17 +17244,7 @@ public final class AdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse(this); int from_bitField0_ = bitField0_; @@ -14174,7 +17256,7 @@ public final class AdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse)other); @@ -14183,7 +17265,7 @@ public final class AdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDefaultInstance()) return this; if (!other.regionToFlush_.isEmpty()) { @@ -14199,63 +17281,76 @@ public final class AdminProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch 
(tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - ensureRegionToFlushIsMutable(); - regionToFlush_.add(input.readBytes()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // repeated bytes regionToFlush = 1; - private java.util.List regionToFlush_ = java.util.Collections.emptyList();; + private java.util.List regionToFlush_ = java.util.Collections.emptyList(); private void ensureRegionToFlushIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { regionToFlush_ = new java.util.ArrayList(regionToFlush_); bitField0_ |= 0x00000001; } } + /** + * repeated bytes regionToFlush = 1; + * + *
+       * A list of encoded names of regions to flush
+       * 
+ */ public java.util.List getRegionToFlushList() { return java.util.Collections.unmodifiableList(regionToFlush_); } + /** + * repeated bytes regionToFlush = 1; + * + *
+       * A list of encoded names of regions to flush
+       * 
+ */ public int getRegionToFlushCount() { return regionToFlush_.size(); } + /** + * repeated bytes regionToFlush = 1; + * + *
+       * A list of encoded names of regions to flush
+       * 
+ */ public com.google.protobuf.ByteString getRegionToFlush(int index) { return regionToFlush_.get(index); } + /** + * repeated bytes regionToFlush = 1; + * + *
+       * A list of encoded names of regions to flush
+       * 
+ */ public Builder setRegionToFlush( int index, com.google.protobuf.ByteString value) { if (value == null) { @@ -14266,6 +17361,13 @@ public final class AdminProtos { onChanged(); return this; } + /** + * repeated bytes regionToFlush = 1; + * + *
+       * A list of encoded names of regions to flush
+       * 
+ */ public Builder addRegionToFlush(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -14275,6 +17377,13 @@ public final class AdminProtos { onChanged(); return this; } + /** + * repeated bytes regionToFlush = 1; + * + *
+       * A list of encoded names of regions to flush
+       * 
+ */ public Builder addAllRegionToFlush( java.lang.Iterable values) { ensureRegionToFlushIsMutable(); @@ -14282,92 +17391,188 @@ public final class AdminProtos { onChanged(); return this; } + /** + * repeated bytes regionToFlush = 1; + * + *
+       * A list of encoded names of regions to flush
+       * 
+ */ public Builder clearRegionToFlush() { - regionToFlush_ = java.util.Collections.emptyList();; + regionToFlush_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:RollWALWriterResponse) } - + static { defaultInstance = new RollWALWriterResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:RollWALWriterResponse) } - + public interface StopServerRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required string reason = 1; + /** + * required string reason = 1; + */ boolean hasReason(); - String getReason(); + /** + * required string reason = 1; + */ + java.lang.String getReason(); + /** + * required string reason = 1; + */ + com.google.protobuf.ByteString + getReasonBytes(); } + /** + * Protobuf type {@code StopServerRequest} + */ public static final class StopServerRequest extends com.google.protobuf.GeneratedMessage implements StopServerRequestOrBuilder { // Use StopServerRequest.newBuilder() to construct. - private StopServerRequest(Builder builder) { + private StopServerRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private StopServerRequest(boolean noInit) {} - + private StopServerRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final StopServerRequest defaultInstance; public static StopServerRequest getDefaultInstance() { return defaultInstance; } - + public StopServerRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private StopServerRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + reason_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public StopServerRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new StopServerRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required string reason = 1; public static final int REASON_FIELD_NUMBER = 1; private java.lang.Object reason_; + /** + * required string reason = 1; + */ public boolean hasReason() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getReason() { + /** + * required string reason = 1; + */ + public java.lang.String getReason() { java.lang.Object ref = reason_; - if (ref instanceof String) { - return (String) ref; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { reason_ = s; } return s; } } - private com.google.protobuf.ByteString getReasonBytes() { + /** + * required string reason = 1; + */ + public com.google.protobuf.ByteString + getReasonBytes() { java.lang.Object ref = reason_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); reason_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + private void initFields() { reason_ = ""; } @@ -14375,7 +17580,7 @@ public final class AdminProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasReason()) { memoizedIsInitialized = 0; return false; @@ -14383,7 +17588,7 @@ public final class AdminProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -14392,12 +17597,12 @@ public final class AdminProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -14407,14 +17612,14 @@ public final class AdminProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -14424,7 +17629,7 @@ public final class AdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest) obj; - + boolean result = true; result = result && (hasReason() == 
other.hasReason()); if (hasReason()) { @@ -14435,9 +17640,13 @@ public final class AdminProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasReason()) { @@ -14445,89 +17654,79 @@ public final class AdminProtos { hash = (53 * hash) + getReason().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code StopServerRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequestOrBuilder { @@ -14535,18 +17734,21 @@ public final class AdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -14557,27 +17759,27 @@ public final class AdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); reason_ = ""; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest build() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest result = buildPartial(); if (!result.isInitialized()) { @@ -14585,17 +17787,7 @@ public final class AdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest result = 
buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest(this); int from_bitField0_ = bitField0_; @@ -14608,7 +17800,7 @@ public final class AdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest)other); @@ -14617,16 +17809,18 @@ public final class AdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.getDefaultInstance()) return this; if (other.hasReason()) { - setReason(other.getReason()); + bitField0_ |= 0x00000001; + reason_ = other.reason_; + onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasReason()) { @@ -14634,57 +17828,69 @@ public final class AdminProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - reason_ = input.readBytes(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required string reason = 1; private java.lang.Object reason_ = ""; + /** + * required string reason = 1; + */ public boolean hasReason() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getReason() { + /** + * required string reason = 1; + */ + public java.lang.String getReason() { + java.lang.Object ref = reason_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + reason_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * required string reason = 1; + */ + public com.google.protobuf.ByteString + getReasonBytes() { java.lang.Object ref = reason_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); - reason_ = s; - return s; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + 
com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + reason_ = b; + return b; } else { - return (String) ref; + return (com.google.protobuf.ByteString) ref; } } - public Builder setReason(String value) { + /** + * required string reason = 1; + */ + public Builder setReason( + java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ -14693,95 +17899,167 @@ public final class AdminProtos { onChanged(); return this; } + /** + * required string reason = 1; + */ public Builder clearReason() { bitField0_ = (bitField0_ & ~0x00000001); reason_ = getDefaultInstance().getReason(); onChanged(); return this; } - void setReason(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; + /** + * required string reason = 1; + */ + public Builder setReasonBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; reason_ = value; onChanged(); + return this; } - + // @@protoc_insertion_point(builder_scope:StopServerRequest) } - + static { defaultInstance = new StopServerRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:StopServerRequest) } - + public interface StopServerResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code StopServerResponse} + */ public static final class StopServerResponse extends com.google.protobuf.GeneratedMessage implements StopServerResponseOrBuilder { // Use StopServerResponse.newBuilder() to construct. - private StopServerResponse(Builder builder) { + private StopServerResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private StopServerResponse(boolean noInit) {} - + private StopServerResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final StopServerResponse defaultInstance; public static StopServerResponse getDefaultInstance() { return defaultInstance; } - + public StopServerResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private StopServerResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerResponse_descriptor; } - + protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public StopServerResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new StopServerResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -14791,101 +18069,95 @@ public final class AdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse) obj; - + boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return 
PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code StopServerResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponseOrBuilder { @@ -14893,18 +18165,21 @@ public final class AdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -14915,25 +18190,25 @@ public final class AdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse build() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse result = buildPartial(); if (!result.isInitialized()) { @@ -14941,23 +18216,13 @@ public final class AdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse(this); onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse)other); @@ -14966,122 +18231,173 @@ public final class AdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - 
this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - - + // @@protoc_insertion_point(builder_scope:StopServerResponse) } - + static { defaultInstance = new StopServerResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:StopServerResponse) } - + public interface GetServerInfoRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code GetServerInfoRequest} + */ public static final class GetServerInfoRequest extends com.google.protobuf.GeneratedMessage implements GetServerInfoRequestOrBuilder { // Use GetServerInfoRequest.newBuilder() to construct. - private GetServerInfoRequest(Builder builder) { + private GetServerInfoRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private GetServerInfoRequest(boolean noInit) {} - + private GetServerInfoRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final GetServerInfoRequest defaultInstance; public static GetServerInfoRequest getDefaultInstance() { return defaultInstance; } - + public GetServerInfoRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private GetServerInfoRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public GetServerInfoRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetServerInfoRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -15091,101 +18407,95 @@ public final class AdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest) obj; - + boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code GetServerInfoRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequestOrBuilder { @@ -15193,18 +18503,21 @@ public final class AdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -15215,25 +18528,25 @@ public final class AdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest build() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest result = buildPartial(); if (!result.isInitialized()) { @@ -15241,23 +18554,13 @@ public final class AdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest(this); onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest)other); @@ -15266,120 +18569,220 @@ public final class AdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - 
com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - - + // @@protoc_insertion_point(builder_scope:GetServerInfoRequest) } - + static { defaultInstance = new GetServerInfoRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:GetServerInfoRequest) } - + public interface ServerInfoOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .ServerName serverName = 1; + /** + * required .ServerName serverName = 1; + */ boolean hasServerName(); + /** + * required .ServerName serverName = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName(); + /** + * required .ServerName serverName = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder(); - + // optional uint32 webuiPort = 2; + /** + * optional uint32 webuiPort = 2; + */ boolean hasWebuiPort(); + /** + * optional uint32 webuiPort = 2; + */ int getWebuiPort(); } + /** + * Protobuf type {@code ServerInfo} + */ public static final class ServerInfo extends com.google.protobuf.GeneratedMessage implements ServerInfoOrBuilder { // Use ServerInfo.newBuilder() to construct. 
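/*
 * Pattern note for the regenerated messages in this hunk: each message now
 * carries a private (CodedInputStream, ExtensionRegistryLite) constructor
 * that reads its own fields and keeps the collected unknown fields on the
 * instance, plus a static PARSER (an AbstractParser) whose parsePartialFrom
 * simply invokes that constructor. Builder.mergeFrom(CodedInputStream, ...)
 * now delegates to PARSER.parsePartialFrom and merges the parsed message,
 * replacing the hand-rolled tag loop of the previous generated code.
 */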
- private ServerInfo(Builder builder) { + private ServerInfo(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private ServerInfo(boolean noInit) {} - + private ServerInfo(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final ServerInfo defaultInstance; public static ServerInfo getDefaultInstance() { return defaultInstance; } - + public ServerInfo getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ServerInfo( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = serverName_.toBuilder(); + } + serverName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(serverName_); + serverName_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + case 16: { + bitField0_ |= 0x00000002; + webuiPort_ = input.readUInt32(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ServerInfo_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ServerInfo_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ServerInfo_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public ServerInfo parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ServerInfo(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required .ServerName serverName = 1; public static final int 
SERVERNAME_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName serverName_; + /** + * required .ServerName serverName = 1; + */ public boolean hasServerName() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .ServerName serverName = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName() { return serverName_; } + /** + * required .ServerName serverName = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder() { return serverName_; } - + // optional uint32 webuiPort = 2; public static final int WEBUIPORT_FIELD_NUMBER = 2; private int webuiPort_; + /** + * optional uint32 webuiPort = 2; + */ public boolean hasWebuiPort() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional uint32 webuiPort = 2; + */ public int getWebuiPort() { return webuiPort_; } - + private void initFields() { serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); webuiPort_ = 0; @@ -15388,7 +18791,7 @@ public final class AdminProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasServerName()) { memoizedIsInitialized = 0; return false; @@ -15400,7 +18803,7 @@ public final class AdminProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -15412,12 +18815,12 @@ public final class AdminProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -15431,14 +18834,14 @@ public final class AdminProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -15448,7 +18851,7 @@ public final class AdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo) obj; - + boolean result = true; result = result && (hasServerName() == other.hasServerName()); if (hasServerName()) { @@ -15464,9 +18867,13 @@ public final class AdminProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasServerName()) { @@ -15478,89 +18885,79 @@ public final class AdminProtos { hash = (53 * hash) + getWebuiPort(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code ServerInfo} + */ public static final class Builder extends 
com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfoOrBuilder { @@ -15568,18 +18965,21 @@ public final class AdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ServerInfo_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ServerInfo_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ServerInfo_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -15591,7 +18991,7 @@ public final class AdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (serverNameBuilder_ == null) { @@ -15604,20 +19004,20 @@ public final class AdminProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ServerInfo_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo build() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo result = buildPartial(); if (!result.isInitialized()) { @@ -15625,17 +19025,7 @@ public final class AdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo buildPartial() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo(this); int from_bitField0_ = bitField0_; @@ -15656,7 +19046,7 @@ public final class AdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo)other); @@ -15665,7 +19055,7 @@ public final class AdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo other) { if (other == 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.getDefaultInstance()) return this; if (other.hasServerName()) { @@ -15677,7 +19067,7 @@ public final class AdminProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasServerName()) { @@ -15689,57 +19079,39 @@ public final class AdminProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(); - if (hasServerName()) { - subBuilder.mergeFrom(getServerName()); - } - input.readMessage(subBuilder, extensionRegistry); - setServerName(subBuilder.buildPartial()); - break; - } - case 16: { - bitField0_ |= 0x00000002; - webuiPort_ = input.readUInt32(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .ServerName serverName = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverNameBuilder_; + /** + * required .ServerName serverName = 1; + */ public boolean hasServerName() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .ServerName serverName = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName() { if (serverNameBuilder_ == null) { return serverName_; @@ -15747,6 +19119,9 @@ public final class AdminProtos { return serverNameBuilder_.getMessage(); } } + /** + * required .ServerName serverName = 1; + */ public Builder setServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { if (serverNameBuilder_ == null) { if (value == null) { @@ -15760,6 +19135,9 @@ public final class AdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .ServerName serverName = 1; + */ public Builder setServerName( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { if (serverNameBuilder_ == null) { @@ -15771,6 +19149,9 @@ public final class AdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .ServerName 
serverName = 1; + */ public Builder mergeServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { if (serverNameBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -15787,6 +19168,9 @@ public final class AdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .ServerName serverName = 1; + */ public Builder clearServerName() { if (serverNameBuilder_ == null) { serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); @@ -15797,11 +19181,17 @@ public final class AdminProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * required .ServerName serverName = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getServerNameBuilder() { bitField0_ |= 0x00000001; onChanged(); return getServerNameFieldBuilder().getBuilder(); } + /** + * required .ServerName serverName = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder() { if (serverNameBuilder_ != null) { return serverNameBuilder_.getMessageOrBuilder(); @@ -15809,6 +19199,9 @@ public final class AdminProtos { return serverName_; } } + /** + * required .ServerName serverName = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getServerNameFieldBuilder() { @@ -15822,89 +19215,194 @@ public final class AdminProtos { } return serverNameBuilder_; } - + // optional uint32 webuiPort = 2; private int webuiPort_ ; + /** + * optional uint32 webuiPort = 2; + */ public boolean hasWebuiPort() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional uint32 webuiPort = 2; + */ public int getWebuiPort() { return webuiPort_; } + /** + * optional uint32 webuiPort = 2; + */ public Builder setWebuiPort(int value) { bitField0_ |= 0x00000002; webuiPort_ = value; onChanged(); return this; } + /** + * optional uint32 webuiPort = 2; + */ public Builder clearWebuiPort() { bitField0_ = (bitField0_ & ~0x00000002); webuiPort_ = 0; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:ServerInfo) } - + static { defaultInstance = new ServerInfo(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:ServerInfo) } - + public interface GetServerInfoResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .ServerInfo serverInfo = 1; + /** + * required .ServerInfo serverInfo = 1; + */ boolean hasServerInfo(); + /** + * required .ServerInfo serverInfo = 1; + */ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo getServerInfo(); + /** + * required .ServerInfo serverInfo = 1; + */ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfoOrBuilder getServerInfoOrBuilder(); } + /** + * Protobuf type {@code GetServerInfoResponse} + */ public static final class GetServerInfoResponse extends com.google.protobuf.GeneratedMessage implements GetServerInfoResponseOrBuilder { // Use GetServerInfoResponse.newBuilder() to construct. 
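/*
 * For singular embedded-message fields such as the required serverInfo below,
 * the parsing constructor uses the subBuilder idiom visible in this hunk: when
 * the field is already present, toBuilder() captures the current value, the
 * newly read message is merged into it, and buildPartial() stores the result,
 * so a duplicated field on the wire merges rather than overwrites.
 */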
- private GetServerInfoResponse(Builder builder) { + private GetServerInfoResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private GetServerInfoResponse(boolean noInit) {} - + private GetServerInfoResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final GetServerInfoResponse defaultInstance; public static GetServerInfoResponse getDefaultInstance() { return defaultInstance; } - + public GetServerInfoResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private GetServerInfoResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = serverInfo_.toBuilder(); + } + serverInfo_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(serverInfo_); + serverInfo_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public GetServerInfoResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetServerInfoResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int 
bitField0_; // required .ServerInfo serverInfo = 1; public static final int SERVERINFO_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo serverInfo_; + /** + * required .ServerInfo serverInfo = 1; + */ public boolean hasServerInfo() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .ServerInfo serverInfo = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo getServerInfo() { return serverInfo_; } + /** + * required .ServerInfo serverInfo = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfoOrBuilder getServerInfoOrBuilder() { return serverInfo_; } - + private void initFields() { serverInfo_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.getDefaultInstance(); } @@ -15912,7 +19410,7 @@ public final class AdminProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasServerInfo()) { memoizedIsInitialized = 0; return false; @@ -15924,7 +19422,7 @@ public final class AdminProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -15933,12 +19431,12 @@ public final class AdminProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -15948,14 +19446,14 @@ public final class AdminProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -15965,7 +19463,7 @@ public final class AdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse) obj; - + boolean result = true; result = result && (hasServerInfo() == other.hasServerInfo()); if (hasServerInfo()) { @@ -15976,9 +19474,13 @@ public final class AdminProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasServerInfo()) { @@ -15986,89 +19488,79 @@ public final class AdminProtos { hash = (53 * hash) + getServerInfo().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return 
newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code GetServerInfoResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponseOrBuilder { @@ 
-16076,18 +19568,21 @@ public final class AdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -16099,7 +19594,7 @@ public final class AdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (serverInfoBuilder_ == null) { @@ -16110,20 +19605,20 @@ public final class AdminProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse build() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse result = buildPartial(); if (!result.isInitialized()) { @@ -16131,17 +19626,7 @@ public final class AdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse(this); int from_bitField0_ = bitField0_; @@ -16158,7 +19643,7 @@ public final class AdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse)other); @@ -16167,7 +19652,7 @@ public final class AdminProtos { return this; } } - + public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDefaultInstance()) return this; if (other.hasServerInfo()) { @@ -16176,7 +19661,7 @@ public final class AdminProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasServerInfo()) { @@ -16188,52 +19673,39 @@ public final class AdminProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.newBuilder(); - if (hasServerInfo()) { - subBuilder.mergeFrom(getServerInfo()); - } - input.readMessage(subBuilder, extensionRegistry); - setServerInfo(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .ServerInfo serverInfo = 1; private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo serverInfo_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfoOrBuilder> serverInfoBuilder_; + /** + * required .ServerInfo serverInfo = 1; + */ public boolean hasServerInfo() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .ServerInfo serverInfo = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo getServerInfo() { if (serverInfoBuilder_ == null) { return serverInfo_; @@ -16241,6 +19713,9 @@ public final class AdminProtos { return serverInfoBuilder_.getMessage(); } } + /** + * required .ServerInfo serverInfo = 1; + */ public Builder setServerInfo(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo value) { if (serverInfoBuilder_ == null) { if (value == null) { @@ -16254,6 +19729,9 @@ public final class AdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .ServerInfo serverInfo = 1; + */ public Builder setServerInfo( org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.Builder builderForValue) { if (serverInfoBuilder_ == null) { @@ -16265,6 +19743,9 @@ public final class AdminProtos { bitField0_ |= 
0x00000001; return this; } + /** + * required .ServerInfo serverInfo = 1; + */ public Builder mergeServerInfo(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo value) { if (serverInfoBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -16281,6 +19762,9 @@ public final class AdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .ServerInfo serverInfo = 1; + */ public Builder clearServerInfo() { if (serverInfoBuilder_ == null) { serverInfo_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.getDefaultInstance(); @@ -16291,11 +19775,17 @@ public final class AdminProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * required .ServerInfo serverInfo = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.Builder getServerInfoBuilder() { bitField0_ |= 0x00000001; onChanged(); return getServerInfoFieldBuilder().getBuilder(); } + /** + * required .ServerInfo serverInfo = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfoOrBuilder getServerInfoOrBuilder() { if (serverInfoBuilder_ != null) { return serverInfoBuilder_.getMessageOrBuilder(); @@ -16303,6 +19793,9 @@ public final class AdminProtos { return serverInfo_; } } + /** + * required .ServerInfo serverInfo = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfoOrBuilder> getServerInfoFieldBuilder() { @@ -16316,90 +19809,132 @@ public final class AdminProtos { } return serverInfoBuilder_; } - + // @@protoc_insertion_point(builder_scope:GetServerInfoResponse) } - + static { defaultInstance = new GetServerInfoResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:GetServerInfoResponse) } - + + /** + * Protobuf service {@code AdminService} + */ public static abstract class AdminService implements com.google.protobuf.Service { protected AdminService() {} - + public interface Interface { + /** + * rpc getRegionInfo(.GetRegionInfoRequest) returns (.GetRegionInfoResponse); + */ public abstract void getRegionInfo( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc getStoreFile(.GetStoreFileRequest) returns (.GetStoreFileResponse); + */ public abstract void getStoreFile( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc getOnlineRegion(.GetOnlineRegionRequest) returns (.GetOnlineRegionResponse); + */ public abstract void getOnlineRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc openRegion(.OpenRegionRequest) returns (.OpenRegionResponse); + */ public abstract void openRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc closeRegion(.CloseRegionRequest) returns (.CloseRegionResponse); + */ public abstract void closeRegion( com.google.protobuf.RpcController controller, 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc flushRegion(.FlushRegionRequest) returns (.FlushRegionResponse); + */ public abstract void flushRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc splitRegion(.SplitRegionRequest) returns (.SplitRegionResponse); + */ public abstract void splitRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc compactRegion(.CompactRegionRequest) returns (.CompactRegionResponse); + */ public abstract void compactRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc mergeRegions(.MergeRegionsRequest) returns (.MergeRegionsResponse); + */ public abstract void mergeRegions( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc replicateWALEntry(.ReplicateWALEntryRequest) returns (.ReplicateWALEntryResponse); + */ public abstract void replicateWALEntry( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc rollWALWriter(.RollWALWriterRequest) returns (.RollWALWriterResponse); + */ public abstract void rollWALWriter( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc getServerInfo(.GetServerInfoRequest) returns (.GetServerInfoResponse); + */ public abstract void getServerInfo( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc stopServer(.StopServerRequest) returns (.StopServerResponse); + */ public abstract void stopServer( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest request, com.google.protobuf.RpcCallback done); - + } - + public static com.google.protobuf.Service newReflectiveService( final Interface impl) { return new AdminService() { @@ -16410,7 +19945,7 @@ public final class AdminProtos { com.google.protobuf.RpcCallback done) { impl.getRegionInfo(controller, request, done); } - + @java.lang.Override public void getStoreFile( com.google.protobuf.RpcController controller, @@ -16418,7 +19953,7 @@ public final class AdminProtos { com.google.protobuf.RpcCallback done) { impl.getStoreFile(controller, request, done); } - + @java.lang.Override public void getOnlineRegion( com.google.protobuf.RpcController controller, @@ -16426,7 +19961,7 @@ public final class AdminProtos { com.google.protobuf.RpcCallback done) { impl.getOnlineRegion(controller, request, done); } - + @java.lang.Override public void openRegion( com.google.protobuf.RpcController controller, @@ -16434,7 +19969,7 @@ public final class AdminProtos { com.google.protobuf.RpcCallback done) { impl.openRegion(controller, request, done); } - + @java.lang.Override public void closeRegion( 
com.google.protobuf.RpcController controller, @@ -16442,7 +19977,7 @@ public final class AdminProtos { com.google.protobuf.RpcCallback done) { impl.closeRegion(controller, request, done); } - + @java.lang.Override public void flushRegion( com.google.protobuf.RpcController controller, @@ -16450,7 +19985,7 @@ public final class AdminProtos { com.google.protobuf.RpcCallback done) { impl.flushRegion(controller, request, done); } - + @java.lang.Override public void splitRegion( com.google.protobuf.RpcController controller, @@ -16458,7 +19993,7 @@ public final class AdminProtos { com.google.protobuf.RpcCallback done) { impl.splitRegion(controller, request, done); } - + @java.lang.Override public void compactRegion( com.google.protobuf.RpcController controller, @@ -16466,7 +20001,7 @@ public final class AdminProtos { com.google.protobuf.RpcCallback done) { impl.compactRegion(controller, request, done); } - + @java.lang.Override public void mergeRegions( com.google.protobuf.RpcController controller, @@ -16474,7 +20009,7 @@ public final class AdminProtos { com.google.protobuf.RpcCallback done) { impl.mergeRegions(controller, request, done); } - + @java.lang.Override public void replicateWALEntry( com.google.protobuf.RpcController controller, @@ -16482,7 +20017,7 @@ public final class AdminProtos { com.google.protobuf.RpcCallback done) { impl.replicateWALEntry(controller, request, done); } - + @java.lang.Override public void rollWALWriter( com.google.protobuf.RpcController controller, @@ -16490,7 +20025,7 @@ public final class AdminProtos { com.google.protobuf.RpcCallback done) { impl.rollWALWriter(controller, request, done); } - + @java.lang.Override public void getServerInfo( com.google.protobuf.RpcController controller, @@ -16498,7 +20033,7 @@ public final class AdminProtos { com.google.protobuf.RpcCallback done) { impl.getServerInfo(controller, request, done); } - + @java.lang.Override public void stopServer( com.google.protobuf.RpcController controller, @@ -16506,10 +20041,10 @@ public final class AdminProtos { com.google.protobuf.RpcCallback done) { impl.stopServer(controller, request, done); } - + }; } - + public static com.google.protobuf.BlockingService newReflectiveBlockingService(final BlockingInterface impl) { return new com.google.protobuf.BlockingService() { @@ -16517,7 +20052,7 @@ public final class AdminProtos { getDescriptorForType() { return getDescriptor(); } - + public final com.google.protobuf.Message callBlockingMethod( com.google.protobuf.Descriptors.MethodDescriptor method, com.google.protobuf.RpcController controller, @@ -16559,7 +20094,7 @@ public final class AdminProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getRequestPrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -16599,7 +20134,7 @@ public final class AdminProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getResponsePrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -16639,75 +20174,114 @@ public final class AdminProtos { throw new java.lang.AssertionError("Can't get here."); } } - + }; } - + + /** + * rpc getRegionInfo(.GetRegionInfoRequest) returns (.GetRegionInfoResponse); + */ public abstract void getRegionInfo( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc getStoreFile(.GetStoreFileRequest) returns 
(.GetStoreFileResponse); + */ public abstract void getStoreFile( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc getOnlineRegion(.GetOnlineRegionRequest) returns (.GetOnlineRegionResponse); + */ public abstract void getOnlineRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc openRegion(.OpenRegionRequest) returns (.OpenRegionResponse); + */ public abstract void openRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc closeRegion(.CloseRegionRequest) returns (.CloseRegionResponse); + */ public abstract void closeRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc flushRegion(.FlushRegionRequest) returns (.FlushRegionResponse); + */ public abstract void flushRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc splitRegion(.SplitRegionRequest) returns (.SplitRegionResponse); + */ public abstract void splitRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc compactRegion(.CompactRegionRequest) returns (.CompactRegionResponse); + */ public abstract void compactRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc mergeRegions(.MergeRegionsRequest) returns (.MergeRegionsResponse); + */ public abstract void mergeRegions( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc replicateWALEntry(.ReplicateWALEntryRequest) returns (.ReplicateWALEntryResponse); + */ public abstract void replicateWALEntry( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc rollWALWriter(.RollWALWriterRequest) returns (.RollWALWriterResponse); + */ public abstract void rollWALWriter( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc getServerInfo(.GetServerInfoRequest) returns (.GetServerInfoResponse); + */ public abstract void getServerInfo( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc stopServer(.StopServerRequest) returns (.StopServerResponse); + */ public abstract void stopServer( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest request, com.google.protobuf.RpcCallback done); - + public static final 
com.google.protobuf.Descriptors.ServiceDescriptor getDescriptor() { @@ -16717,7 +20291,7 @@ public final class AdminProtos { getDescriptorForType() { return getDescriptor(); } - + public final void callMethod( com.google.protobuf.Descriptors.MethodDescriptor method, com.google.protobuf.RpcController controller, @@ -16799,7 +20373,7 @@ public final class AdminProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getRequestPrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -16839,7 +20413,7 @@ public final class AdminProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getResponsePrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -16879,23 +20453,23 @@ public final class AdminProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public static Stub newStub( com.google.protobuf.RpcChannel channel) { return new Stub(channel); } - + public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.AdminProtos.AdminService implements Interface { private Stub(com.google.protobuf.RpcChannel channel) { this.channel = channel; } - + private final com.google.protobuf.RpcChannel channel; - + public com.google.protobuf.RpcChannel getChannel() { return channel; } - + public void getRegionInfo( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest request, @@ -16910,7 +20484,7 @@ public final class AdminProtos { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDefaultInstance())); } - + public void getStoreFile( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest request, @@ -16925,7 +20499,7 @@ public final class AdminProtos { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.getDefaultInstance())); } - + public void getOnlineRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest request, @@ -16940,7 +20514,7 @@ public final class AdminProtos { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDefaultInstance())); } - + public void openRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest request, @@ -16955,7 +20529,7 @@ public final class AdminProtos { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDefaultInstance())); } - + public void closeRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest request, @@ -16970,7 +20544,7 @@ public final class AdminProtos { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDefaultInstance())); } - + public void flushRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest request, @@ -16985,7 
+20559,7 @@ public final class AdminProtos { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDefaultInstance())); } - + public void splitRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest request, @@ -17000,7 +20574,7 @@ public final class AdminProtos { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDefaultInstance())); } - + public void compactRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest request, @@ -17015,7 +20589,7 @@ public final class AdminProtos { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDefaultInstance())); } - + public void mergeRegions( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest request, @@ -17030,7 +20604,7 @@ public final class AdminProtos { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse.getDefaultInstance())); } - + public void replicateWALEntry( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest request, @@ -17045,7 +20619,7 @@ public final class AdminProtos { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance())); } - + public void rollWALWriter( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest request, @@ -17060,7 +20634,7 @@ public final class AdminProtos { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDefaultInstance())); } - + public void getServerInfo( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest request, @@ -17075,7 +20649,7 @@ public final class AdminProtos { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDefaultInstance())); } - + public void stopServer( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest request, @@ -17091,86 +20665,86 @@ public final class AdminProtos { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDefaultInstance())); } } - + public static BlockingInterface newBlockingStub( com.google.protobuf.BlockingRpcChannel channel) { return new BlockingStub(channel); } - + public interface BlockingInterface { public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse getRegionInfo( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse getStoreFile( 
com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse getOnlineRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse openRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse closeRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse flushRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse splitRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse compactRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse mergeRegions( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse replicateWALEntry( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse rollWALWriter( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse getServerInfo( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse stopServer( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest request) throws com.google.protobuf.ServiceException; } - + private static final class BlockingStub implements BlockingInterface { private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { this.channel = channel; } - + private final com.google.protobuf.BlockingRpcChannel channel; - + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse 
getRegionInfo( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest request) @@ -17181,8 +20755,8 @@ public final class AdminProtos { request, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse getStoreFile( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest request) @@ -17193,8 +20767,8 @@ public final class AdminProtos { request, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse getOnlineRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest request) @@ -17205,8 +20779,8 @@ public final class AdminProtos { request, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse openRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest request) @@ -17217,8 +20791,8 @@ public final class AdminProtos { request, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse closeRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest request) @@ -17229,8 +20803,8 @@ public final class AdminProtos { request, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse flushRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest request) @@ -17241,8 +20815,8 @@ public final class AdminProtos { request, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse splitRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest request) @@ -17253,8 +20827,8 @@ public final class AdminProtos { request, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse compactRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest request) @@ -17265,8 +20839,8 @@ public final class AdminProtos { request, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse mergeRegions( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest request) @@ -17277,8 +20851,8 @@ public final class AdminProtos { request, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse.getDefaultInstance()); } - - + + public 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse replicateWALEntry( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest request) @@ -17289,8 +20863,8 @@ public final class AdminProtos { request, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse rollWALWriter( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest request) @@ -17301,8 +20875,8 @@ public final class AdminProtos { request, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse getServerInfo( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest request) @@ -17313,8 +20887,8 @@ public final class AdminProtos { request, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse stopServer( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest request) @@ -17325,10 +20899,12 @@ public final class AdminProtos { request, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDefaultInstance()); } - + } + + // @@protoc_insertion_point(class_scope:AdminService) } - + private static com.google.protobuf.Descriptors.Descriptor internal_static_GetRegionInfoRequest_descriptor; private static @@ -17494,7 +21070,7 @@ public final class AdminProtos { private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_GetServerInfoResponse_fieldAccessorTable; - + public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; @@ -17596,265 +21172,199 @@ public final class AdminProtos { internal_static_GetRegionInfoRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_GetRegionInfoRequest_descriptor, - new java.lang.String[] { "Region", "CompactionState", }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.Builder.class); + new java.lang.String[] { "Region", "CompactionState", }); internal_static_GetRegionInfoResponse_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_GetRegionInfoResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_GetRegionInfoResponse_descriptor, - new java.lang.String[] { "RegionInfo", "CompactionState", }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.Builder.class); + new java.lang.String[] { "RegionInfo", "CompactionState", }); internal_static_GetStoreFileRequest_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_GetStoreFileRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_GetStoreFileRequest_descriptor, - new java.lang.String[] { "Region", "Family", }, - 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.Builder.class); + new java.lang.String[] { "Region", "Family", }); internal_static_GetStoreFileResponse_descriptor = getDescriptor().getMessageTypes().get(3); internal_static_GetStoreFileResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_GetStoreFileResponse_descriptor, - new java.lang.String[] { "StoreFile", }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.Builder.class); + new java.lang.String[] { "StoreFile", }); internal_static_GetOnlineRegionRequest_descriptor = getDescriptor().getMessageTypes().get(4); internal_static_GetOnlineRegionRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_GetOnlineRegionRequest_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.Builder.class); + new java.lang.String[] { }); internal_static_GetOnlineRegionResponse_descriptor = getDescriptor().getMessageTypes().get(5); internal_static_GetOnlineRegionResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_GetOnlineRegionResponse_descriptor, - new java.lang.String[] { "RegionInfo", }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.Builder.class); + new java.lang.String[] { "RegionInfo", }); internal_static_OpenRegionRequest_descriptor = getDescriptor().getMessageTypes().get(6); internal_static_OpenRegionRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_OpenRegionRequest_descriptor, - new java.lang.String[] { "OpenInfo", }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.Builder.class); + new java.lang.String[] { "OpenInfo", }); internal_static_OpenRegionRequest_RegionOpenInfo_descriptor = internal_static_OpenRegionRequest_descriptor.getNestedTypes().get(0); internal_static_OpenRegionRequest_RegionOpenInfo_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_OpenRegionRequest_RegionOpenInfo_descriptor, - new java.lang.String[] { "Region", "VersionOfOfflineNode", }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder.class); + new java.lang.String[] { "Region", "VersionOfOfflineNode", }); internal_static_OpenRegionResponse_descriptor = getDescriptor().getMessageTypes().get(7); internal_static_OpenRegionResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_OpenRegionResponse_descriptor, - new java.lang.String[] { "OpeningState", }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.Builder.class); + new java.lang.String[] { "OpeningState", }); internal_static_CloseRegionRequest_descriptor = 
getDescriptor().getMessageTypes().get(8); internal_static_CloseRegionRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_CloseRegionRequest_descriptor, - new java.lang.String[] { "Region", "VersionOfClosingNode", "TransitionInZK", "DestinationServer", }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.Builder.class); + new java.lang.String[] { "Region", "VersionOfClosingNode", "TransitionInZK", "DestinationServer", }); internal_static_CloseRegionResponse_descriptor = getDescriptor().getMessageTypes().get(9); internal_static_CloseRegionResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_CloseRegionResponse_descriptor, - new java.lang.String[] { "Closed", }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.Builder.class); + new java.lang.String[] { "Closed", }); internal_static_FlushRegionRequest_descriptor = getDescriptor().getMessageTypes().get(10); internal_static_FlushRegionRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_FlushRegionRequest_descriptor, - new java.lang.String[] { "Region", "IfOlderThanTs", }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.Builder.class); + new java.lang.String[] { "Region", "IfOlderThanTs", }); internal_static_FlushRegionResponse_descriptor = getDescriptor().getMessageTypes().get(11); internal_static_FlushRegionResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_FlushRegionResponse_descriptor, - new java.lang.String[] { "LastFlushTime", "Flushed", }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.Builder.class); + new java.lang.String[] { "LastFlushTime", "Flushed", }); internal_static_SplitRegionRequest_descriptor = getDescriptor().getMessageTypes().get(12); internal_static_SplitRegionRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_SplitRegionRequest_descriptor, - new java.lang.String[] { "Region", "SplitPoint", }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.Builder.class); + new java.lang.String[] { "Region", "SplitPoint", }); internal_static_SplitRegionResponse_descriptor = getDescriptor().getMessageTypes().get(13); internal_static_SplitRegionResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_SplitRegionResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.Builder.class); + new java.lang.String[] { }); internal_static_CompactRegionRequest_descriptor = getDescriptor().getMessageTypes().get(14); internal_static_CompactRegionRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_CompactRegionRequest_descriptor, - new java.lang.String[] { "Region", "Major", "Family", }, 
- org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.Builder.class); + new java.lang.String[] { "Region", "Major", "Family", }); internal_static_CompactRegionResponse_descriptor = getDescriptor().getMessageTypes().get(15); internal_static_CompactRegionResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_CompactRegionResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.Builder.class); + new java.lang.String[] { }); internal_static_MergeRegionsRequest_descriptor = getDescriptor().getMessageTypes().get(16); internal_static_MergeRegionsRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_MergeRegionsRequest_descriptor, - new java.lang.String[] { "RegionA", "RegionB", "Forcible", }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest.Builder.class); + new java.lang.String[] { "RegionA", "RegionB", "Forcible", }); internal_static_MergeRegionsResponse_descriptor = getDescriptor().getMessageTypes().get(17); internal_static_MergeRegionsResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_MergeRegionsResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse.Builder.class); + new java.lang.String[] { }); internal_static_UUID_descriptor = getDescriptor().getMessageTypes().get(18); internal_static_UUID_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_UUID_descriptor, - new java.lang.String[] { "LeastSigBits", "MostSigBits", }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder.class); + new java.lang.String[] { "LeastSigBits", "MostSigBits", }); internal_static_WALEntry_descriptor = getDescriptor().getMessageTypes().get(19); internal_static_WALEntry_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_WALEntry_descriptor, - new java.lang.String[] { "Key", "Edit", }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder.class); + new java.lang.String[] { "Key", "Edit", }); internal_static_WALEntry_WALKey_descriptor = internal_static_WALEntry_descriptor.getNestedTypes().get(0); internal_static_WALEntry_WALKey_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_WALEntry_WALKey_descriptor, - new java.lang.String[] { "EncodedRegionName", "TableName", "LogSequenceNumber", "WriteTime", "ClusterId", }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder.class); + new java.lang.String[] { "EncodedRegionName", "TableName", "LogSequenceNumber", "WriteTime", "ClusterId", }); internal_static_WALEntry_WALEdit_descriptor = internal_static_WALEntry_descriptor.getNestedTypes().get(1); 
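The edit repeated throughout this static initializer is the protobuf 2.4-to-2.5 change in how reflection tables are built: each FieldAccessorTable is now constructed from the proto field names alone, and the concrete message and builder classes are bound lazily via ensureFieldAccessorsInitialized(). A minimal sketch of the 2.5-style accessor, using a hypothetical generated message Foo (the real equivalents are the internalGetFieldAccessorTable() overrides elsewhere in this patch):

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      // 2.5 style: the class literals are supplied on first reflective use,
      // rather than being passed eagerly to the FieldAccessorTable constructor.
      return internal_static_Foo_fieldAccessorTable
          .ensureFieldAccessorsInitialized(Foo.class, Foo.Builder.class);
    }
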
internal_static_WALEntry_WALEdit_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_WALEntry_WALEdit_descriptor, - new java.lang.String[] { "KeyValueBytes", "FamilyScope", }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder.class); + new java.lang.String[] { "KeyValueBytes", "FamilyScope", }); internal_static_WALEntry_WALEdit_FamilyScope_descriptor = internal_static_WALEntry_WALEdit_descriptor.getNestedTypes().get(0); internal_static_WALEntry_WALEdit_FamilyScope_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_WALEntry_WALEdit_FamilyScope_descriptor, - new java.lang.String[] { "Family", "ScopeType", }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder.class); + new java.lang.String[] { "Family", "ScopeType", }); internal_static_ReplicateWALEntryRequest_descriptor = getDescriptor().getMessageTypes().get(20); internal_static_ReplicateWALEntryRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ReplicateWALEntryRequest_descriptor, - new java.lang.String[] { "Entry", }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.Builder.class); + new java.lang.String[] { "Entry", }); internal_static_ReplicateWALEntryResponse_descriptor = getDescriptor().getMessageTypes().get(21); internal_static_ReplicateWALEntryResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ReplicateWALEntryResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.Builder.class); + new java.lang.String[] { }); internal_static_RollWALWriterRequest_descriptor = getDescriptor().getMessageTypes().get(22); internal_static_RollWALWriterRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_RollWALWriterRequest_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.Builder.class); + new java.lang.String[] { }); internal_static_RollWALWriterResponse_descriptor = getDescriptor().getMessageTypes().get(23); internal_static_RollWALWriterResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_RollWALWriterResponse_descriptor, - new java.lang.String[] { "RegionToFlush", }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.Builder.class); + new java.lang.String[] { "RegionToFlush", }); internal_static_StopServerRequest_descriptor = getDescriptor().getMessageTypes().get(24); internal_static_StopServerRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_StopServerRequest_descriptor, - new java.lang.String[] { "Reason", }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.class, - 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.Builder.class); + new java.lang.String[] { "Reason", }); internal_static_StopServerResponse_descriptor = getDescriptor().getMessageTypes().get(25); internal_static_StopServerResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_StopServerResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.Builder.class); + new java.lang.String[] { }); internal_static_GetServerInfoRequest_descriptor = getDescriptor().getMessageTypes().get(26); internal_static_GetServerInfoRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_GetServerInfoRequest_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.Builder.class); + new java.lang.String[] { }); internal_static_ServerInfo_descriptor = getDescriptor().getMessageTypes().get(27); internal_static_ServerInfo_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ServerInfo_descriptor, - new java.lang.String[] { "ServerName", "WebuiPort", }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.Builder.class); + new java.lang.String[] { "ServerName", "WebuiPort", }); internal_static_GetServerInfoResponse_descriptor = getDescriptor().getMessageTypes().get(28); internal_static_GetServerInfoResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_GetServerInfoResponse_descriptor, - new java.lang.String[] { "ServerInfo", }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.Builder.class); + new java.lang.String[] { "ServerInfo", }); return null; } }; @@ -17864,6 +21374,6 @@ public final class AdminProtos { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(), }, assigner); } - + // @@protoc_insertion_point(outer_class_scope) } diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AggregateProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AggregateProtos.java index ad1dbeb..9b300ae 100644 --- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AggregateProtos.java +++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AggregateProtos.java @@ -10,104 +10,289 @@ public final class AggregateProtos { } public interface AggregateArgumentOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required string interpreterClassName = 1; + /** + * required string interpreterClassName = 1; + * + *
+     ** The argument passed to the AggregateService consists of three parts
+     *  (1) the (canonical) classname of the ColumnInterpreter implementation
+     *  (2) the Scan query
+     *  (3) any bytes required to construct the ColumnInterpreter object
+     *      properly
+     * </pre>
+ */ boolean hasInterpreterClassName(); - String getInterpreterClassName(); - + /** + * required string interpreterClassName = 1; + * + *
+     ** The argument passed to the AggregateService consists of three parts
+     *  (1) the (canonical) classname of the ColumnInterpreter implementation
+     *  (2) the Scan query
+     *  (3) any bytes required to construct the ColumnInterpreter object
+     *      properly
+     * </pre>
+ */ + java.lang.String getInterpreterClassName(); + /** + * required string interpreterClassName = 1; + * + *
+     ** The argument passed to the AggregateService consists of three parts
+     *  (1) the (canonical) classname of the ColumnInterpreter implementation
+     *  (2) the Scan query
+     *  (3) any bytes required to construct the ColumnInterpreter object
+     *      properly
+     * </pre>
+ */ + com.google.protobuf.ByteString + getInterpreterClassNameBytes(); + // required .Scan scan = 2; + /** + * required .Scan scan = 2; + */ boolean hasScan(); + /** + * required .Scan scan = 2; + */ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan(); + /** + * required .Scan scan = 2; + */ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder(); - + // optional bytes interpreterSpecificBytes = 3; + /** + * optional bytes interpreterSpecificBytes = 3; + */ boolean hasInterpreterSpecificBytes(); + /** + * optional bytes interpreterSpecificBytes = 3; + */ com.google.protobuf.ByteString getInterpreterSpecificBytes(); } + /** + * Protobuf type {@code AggregateArgument} + */ public static final class AggregateArgument extends com.google.protobuf.GeneratedMessage implements AggregateArgumentOrBuilder { // Use AggregateArgument.newBuilder() to construct. - private AggregateArgument(Builder builder) { + private AggregateArgument(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private AggregateArgument(boolean noInit) {} - + private AggregateArgument(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final AggregateArgument defaultInstance; public static AggregateArgument getDefaultInstance() { return defaultInstance; } - + public AggregateArgument getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private AggregateArgument( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + interpreterClassName_ = input.readBytes(); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder subBuilder = null; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + subBuilder = scan_.toBuilder(); + } + scan_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(scan_); + scan_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000002; + break; + } + case 26: { + bitField0_ |= 0x00000004; + interpreterSpecificBytes_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateArgument_descriptor; } - + protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateArgument_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateArgument_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.class, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public AggregateArgument parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new AggregateArgument(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required string interpreterClassName = 1; public static final int INTERPRETERCLASSNAME_FIELD_NUMBER = 1; private java.lang.Object interpreterClassName_; + /** + * required string interpreterClassName = 1; + * + *
+     ** The argument passed to the AggregateService consists of three parts
+     *  (1) the (canonical) classname of the ColumnInterpreter implementation
+     *  (2) the Scan query
+     *  (3) any bytes required to construct the ColumnInterpreter object
+     *      properly
+     * </pre>
+ */ public boolean hasInterpreterClassName() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getInterpreterClassName() { + /** + * required string interpreterClassName = 1; + * + *
+     ** The argument passed to the AggregateService consists of three parts
+     *  (1) the (canonical) classname of the ColumnInterpreter implementation
+     *  (2) the Scan query
+     *  (3) any bytes required to construct the ColumnInterpreter object
+     *      properly
+     * </pre>
+ */ + public java.lang.String getInterpreterClassName() { java.lang.Object ref = interpreterClassName_; - if (ref instanceof String) { - return (String) ref; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { interpreterClassName_ = s; } return s; } } - private com.google.protobuf.ByteString getInterpreterClassNameBytes() { + /** + * required string interpreterClassName = 1; + * + *
+     ** The argument passed to the AggregateService consists of three parts
+     *  (1) the (canonical) classname of the ColumnInterpreter implementation
+     *  (2) the Scan query
+     *  (3) any bytes required to construct the ColumnInterpreter object
+     *      properly
+     * </pre>
+ */ + public com.google.protobuf.ByteString + getInterpreterClassNameBytes() { java.lang.Object ref = interpreterClassName_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); interpreterClassName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + // required .Scan scan = 2; public static final int SCAN_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_; + /** + * required .Scan scan = 2; + */ public boolean hasScan() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required .Scan scan = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() { return scan_; } + /** + * required .Scan scan = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() { return scan_; } - + // optional bytes interpreterSpecificBytes = 3; public static final int INTERPRETERSPECIFICBYTES_FIELD_NUMBER = 3; private com.google.protobuf.ByteString interpreterSpecificBytes_; + /** + * optional bytes interpreterSpecificBytes = 3; + */ public boolean hasInterpreterSpecificBytes() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional bytes interpreterSpecificBytes = 3; + */ public com.google.protobuf.ByteString getInterpreterSpecificBytes() { return interpreterSpecificBytes_; } - + private void initFields() { interpreterClassName_ = ""; scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); @@ -117,7 +302,7 @@ public final class AggregateProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasInterpreterClassName()) { memoizedIsInitialized = 0; return false; @@ -133,7 +318,7 @@ public final class AggregateProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -148,12 +333,12 @@ public final class AggregateProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -171,14 +356,14 @@ public final class AggregateProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -188,7 +373,7 @@ public final class AggregateProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument other = (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument) obj; - + boolean result = true; result = result && (hasInterpreterClassName() == other.hasInterpreterClassName()); if (hasInterpreterClassName()) { @@ -209,9 +394,13 @@ public final class AggregateProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + 
return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasInterpreterClassName()) { @@ -227,89 +416,79 @@ public final class AggregateProtos { hash = (53 * hash) + getInterpreterSpecificBytes().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException 
{ - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code AggregateArgument} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgumentOrBuilder { @@ -317,18 +496,21 @@ public final class AggregateProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateArgument_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateArgument_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateArgument_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.class, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -340,7 +522,7 @@ public final class AggregateProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); interpreterClassName_ = ""; @@ -355,20 +537,20 @@ public final class AggregateProtos { bitField0_ = (bitField0_ & ~0x00000004); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateArgument_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument build() { org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument result = buildPartial(); if (!result.isInitialized()) { @@ -376,17 +558,7 @@ public final class AggregateProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument result = buildPartial(); - if (!result.isInitialized()) { - throw 
newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument buildPartial() { org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument result = new org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument(this); int from_bitField0_ = bitField0_; @@ -411,7 +583,7 @@ public final class AggregateProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)other); @@ -420,11 +592,13 @@ public final class AggregateProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument other) { if (other == org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance()) return this; if (other.hasInterpreterClassName()) { - setInterpreterClassName(other.getInterpreterClassName()); + bitField0_ |= 0x00000001; + interpreterClassName_ = other.interpreterClassName_; + onChanged(); } if (other.hasScan()) { mergeScan(other.getScan()); @@ -435,7 +609,7 @@ public final class AggregateProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasInterpreterClassName()) { @@ -451,71 +625,101 @@ public final class AggregateProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - interpreterClassName_ = input.readBytes(); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.newBuilder(); - if (hasScan()) { - subBuilder.mergeFrom(getScan()); - } - input.readMessage(subBuilder, extensionRegistry); - setScan(subBuilder.buildPartial()); - break; - } - case 26: { - bitField0_ |= 0x00000004; - interpreterSpecificBytes_ = input.readBytes(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required string interpreterClassName = 1; private java.lang.Object interpreterClassName_ = ""; + /** + * required string interpreterClassName = 1; + * + *
+       ** The argument passed to the AggregateService consists of three parts
+       *  (1) the (canonical) classname of the ColumnInterpreter implementation
+       *  (2) the Scan query
+       *  (3) any bytes required to construct the ColumnInterpreter object
+       *      properly
+       * 
+ */ public boolean hasInterpreterClassName() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getInterpreterClassName() { + /** + * required string interpreterClassName = 1; + * + *
+       ** The argument passed to the AggregateService consists of three parts
+       *  (1) the (canonical) classname of the ColumnInterpreter implementation
+       *  (2) the Scan query
+       *  (3) any bytes required to construct the ColumnInterpreter object
+       *      properly
+       * 
+ */ + public java.lang.String getInterpreterClassName() { java.lang.Object ref = interpreterClassName_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); interpreterClassName_ = s; return s; } else { - return (String) ref; + return (java.lang.String) ref; } } - public Builder setInterpreterClassName(String value) { + /** + * required string interpreterClassName = 1; + * + *
+       ** The argument passed to the AggregateService consists of three parts
+       *  (1) the (canonical) classname of the ColumnInterpreter implementation
+       *  (2) the Scan query
+       *  (3) any bytes required to construct the ColumnInterpreter object
+       *      properly
+       * 
+ */ + public com.google.protobuf.ByteString + getInterpreterClassNameBytes() { + java.lang.Object ref = interpreterClassName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + interpreterClassName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * required string interpreterClassName = 1; + * + *
+       ** The argument passed to the AggregateService consists of three parts
+       *  (1) the (canonical) classname of the ColumnInterpreter implementation
+       *  (2) the Scan query
+       *  (3) any bytes required to construct the ColumnInterpreter object
+       *      properly
+       * 
+ */ + public Builder setInterpreterClassName( + java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ -524,25 +728,58 @@ public final class AggregateProtos { onChanged(); return this; } + /** + * required string interpreterClassName = 1; + * + *
+       ** The argument passed to the AggregateService consists of three parts
+       *  (1) the (canonical) classname of the ColumnInterpreter implementation
+       *  (2) the Scan query
+       *  (3) any bytes required to construct the ColumnInterpreter object
+       *      properly
+       * 
+ */ public Builder clearInterpreterClassName() { bitField0_ = (bitField0_ & ~0x00000001); interpreterClassName_ = getDefaultInstance().getInterpreterClassName(); onChanged(); return this; } - void setInterpreterClassName(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; + /** + * required string interpreterClassName = 1; + * + *
+       ** The argument passed to the AggregateService consists of three parts
+       *  (1) the (canonical) classname of the ColumnInterpreter implementation
+       *  (2) the Scan query
+       *  (3) any bytes required to construct the ColumnInterpreter object
+       *      properly
+       * 
+ */ + public Builder setInterpreterClassNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; interpreterClassName_ = value; onChanged(); + return this; } - + // required .Scan scan = 2; private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> scanBuilder_; + /** + * required .Scan scan = 2; + */ public boolean hasScan() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required .Scan scan = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() { if (scanBuilder_ == null) { return scan_; @@ -550,6 +787,9 @@ public final class AggregateProtos { return scanBuilder_.getMessage(); } } + /** + * required .Scan scan = 2; + */ public Builder setScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) { if (scanBuilder_ == null) { if (value == null) { @@ -563,6 +803,9 @@ public final class AggregateProtos { bitField0_ |= 0x00000002; return this; } + /** + * required .Scan scan = 2; + */ public Builder setScan( org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder builderForValue) { if (scanBuilder_ == null) { @@ -574,6 +817,9 @@ public final class AggregateProtos { bitField0_ |= 0x00000002; return this; } + /** + * required .Scan scan = 2; + */ public Builder mergeScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) { if (scanBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && @@ -590,6 +836,9 @@ public final class AggregateProtos { bitField0_ |= 0x00000002; return this; } + /** + * required .Scan scan = 2; + */ public Builder clearScan() { if (scanBuilder_ == null) { scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); @@ -600,11 +849,17 @@ public final class AggregateProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } + /** + * required .Scan scan = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder getScanBuilder() { bitField0_ |= 0x00000002; onChanged(); return getScanFieldBuilder().getBuilder(); } + /** + * required .Scan scan = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() { if (scanBuilder_ != null) { return scanBuilder_.getMessageOrBuilder(); @@ -612,6 +867,9 @@ public final class AggregateProtos { return scan_; } } + /** + * required .Scan scan = 2; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> getScanFieldBuilder() { @@ -625,15 +883,24 @@ public final class AggregateProtos { } return scanBuilder_; } - + // optional bytes interpreterSpecificBytes = 3; private com.google.protobuf.ByteString interpreterSpecificBytes_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes interpreterSpecificBytes = 3; + */ public boolean hasInterpreterSpecificBytes() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional bytes interpreterSpecificBytes = 3; + */ public 
com.google.protobuf.ByteString getInterpreterSpecificBytes() { return interpreterSpecificBytes_; } + /** + * optional bytes interpreterSpecificBytes = 3; + */ public Builder setInterpreterSpecificBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -643,102 +910,267 @@ public final class AggregateProtos { onChanged(); return this; } + /** + * optional bytes interpreterSpecificBytes = 3; + */ public Builder clearInterpreterSpecificBytes() { bitField0_ = (bitField0_ & ~0x00000004); interpreterSpecificBytes_ = getDefaultInstance().getInterpreterSpecificBytes(); onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:AggregateArgument) } - + static { defaultInstance = new AggregateArgument(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:AggregateArgument) } - + public interface AggregateResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // repeated bytes firstPart = 1; + /** + * repeated bytes firstPart = 1; + * + *
+     **
+     * The AggregateService methods all have a response that either is a Pair
+     * or a simple object. When it is a Pair both firstPart and secondPart
+     * have defined values (and the secondPart is not present in the response 
+     * when the response is not a pair). Refer to the AggregateImplementation 
+     * class for an overview of the AggregateResponse object constructions. 
+     * 
+ */ java.util.List getFirstPartList(); + /** + * repeated bytes firstPart = 1; + * + *
+     **
+     * The AggregateService methods all have a response that either is a Pair
+     * or a simple object. When it is a Pair both firstPart and secondPart
+     * have defined values (and the secondPart is not present in the response 
+     * when the response is not a pair). Refer to the AggregateImplementation 
+     * class for an overview of the AggregateResponse object constructions. 
+     * 
+ */ int getFirstPartCount(); + /** + * repeated bytes firstPart = 1; + * + *
+     **
+     * The AggregateService methods all have a response that either is a Pair
+     * or a simple object. When it is a Pair both firstPart and secondPart
+     * have defined values (and the secondPart is not present in the response 
+     * when the response is not a pair). Refer to the AggregateImplementation 
+     * class for an overview of the AggregateResponse object constructions. 
+     * 
+     */
     com.google.protobuf.ByteString getFirstPart(int index);
-
+
     // optional bytes secondPart = 2;
+    /**
+     * optional bytes secondPart = 2;
+     */
     boolean hasSecondPart();
+    /**
+     * optional bytes secondPart = 2;
+     */
     com.google.protobuf.ByteString getSecondPart();
   }
+  /**
+   * Protobuf type {@code AggregateResponse}
+   */
   public static final class AggregateResponse extends
       com.google.protobuf.GeneratedMessage
       implements AggregateResponseOrBuilder {
     // Use AggregateResponse.newBuilder() to construct.
-    private AggregateResponse(Builder builder) {
+    private AggregateResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
       super(builder);
+      this.unknownFields = builder.getUnknownFields();
     }
-    private AggregateResponse(boolean noInit) {}
-
+    private AggregateResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
     private static final AggregateResponse defaultInstance;
     public static AggregateResponse getDefaultInstance() {
       return defaultInstance;
     }
-
+
     public AggregateResponse getDefaultInstanceForType() {
       return defaultInstance;
     }
-
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private AggregateResponse(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 10: {
+              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+                firstPart_ = new java.util.ArrayList<com.google.protobuf.ByteString>();
+                mutable_bitField0_ |= 0x00000001;
+              }
+              firstPart_.add(input.readBytes());
+              break;
+            }
+            case 18: {
+              bitField0_ |= 0x00000001;
+              secondPart_ = input.readBytes();
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+          firstPart_ = java.util.Collections.unmodifiableList(firstPart_);
+        }
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
       return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateResponse_descriptor;
     }
-
+
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateResponse_fieldAccessorTable;
+      return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateResponse_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<AggregateResponse> PARSER =
+        new com.google.protobuf.AbstractParser<AggregateResponse>() {
+      public AggregateResponse parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new AggregateResponse(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<AggregateResponse> getParserForType() {
+      return PARSER;
     }
-
+
     private int bitField0_;
     // repeated bytes firstPart = 1;
     public static final int FIRSTPART_FIELD_NUMBER = 1;
     private java.util.List<com.google.protobuf.ByteString> firstPart_;
+    /**
+     * repeated bytes firstPart = 1;
+     *
+     *
+     **
+     * The AggregateService methods all have a response that either is a Pair
+     * or a simple object. When it is a Pair both firstPart and secondPart
+     * have defined values (and the secondPart is not present in the response 
+     * when the response is not a pair). Refer to the AggregateImplementation 
+     * class for an overview of the AggregateResponse object constructions. 
+     * 
+ */ public java.util.List getFirstPartList() { return firstPart_; } + /** + * repeated bytes firstPart = 1; + * + *
+     **
+     * The AggregateService methods all have a response that either is a Pair
+     * or a simple object. When it is a Pair both firstPart and secondPart
+     * have defined values (and the secondPart is not present in the response 
+     * when the response is not a pair). Refer to the AggregateImplementation 
+     * class for an overview of the AggregateResponse object constructions. 
+     * 
+ */ public int getFirstPartCount() { return firstPart_.size(); } + /** + * repeated bytes firstPart = 1; + * + *
+     **
+     * The AggregateService methods all have a response that either is a Pair
+     * or a simple object. When it is a Pair both firstPart and secondPart
+     * have defined values (and the secondPart is not present in the response 
+     * when the response is not a pair). Refer to the AggregateImplementation 
+     * class for an overview of the AggregateResponse object constructions. 
+     * 
+ */ public com.google.protobuf.ByteString getFirstPart(int index) { return firstPart_.get(index); } - + // optional bytes secondPart = 2; public static final int SECONDPART_FIELD_NUMBER = 2; private com.google.protobuf.ByteString secondPart_; + /** + * optional bytes secondPart = 2; + */ public boolean hasSecondPart() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional bytes secondPart = 2; + */ public com.google.protobuf.ByteString getSecondPart() { return secondPart_; } - + private void initFields() { - firstPart_ = java.util.Collections.emptyList();; + firstPart_ = java.util.Collections.emptyList(); secondPart_ = com.google.protobuf.ByteString.EMPTY; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -750,12 +1182,12 @@ public final class AggregateProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; { int dataSize = 0; @@ -774,14 +1206,14 @@ public final class AggregateProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -791,7 +1223,7 @@ public final class AggregateProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse other = (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) obj; - + boolean result = true; result = result && getFirstPartList() .equals(other.getFirstPartList()); @@ -804,9 +1236,13 @@ public final class AggregateProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (getFirstPartCount() > 0) { @@ -818,89 +1254,79 @@ public final class AggregateProtos { hash = (53 * hash) + getSecondPart().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return 
PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code AggregateResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponseOrBuilder { @@ -908,18 +1334,21 @@ public final class AggregateProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return 
org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -930,29 +1359,29 @@ public final class AggregateProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); - firstPart_ = java.util.Collections.emptyList();; + firstPart_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); secondPart_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse build() { org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse result = buildPartial(); if (!result.isInitialized()) { @@ -960,17 +1389,7 @@ public final class AggregateProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse result = new org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse(this); int from_bitField0_ = bitField0_; @@ -988,7 +1407,7 @@ public final class AggregateProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse)other); @@ -997,7 +1416,7 @@ public final class AggregateProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance()) return this; if (!other.firstPart_.isEmpty()) { 
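
// Editor's aside (not part of the generated patch): the recurring change in
// these hunks replaces the old parse path, newBuilder().mergeFrom(data).buildParsed(),
// with the protobuf 2.5-style static PARSER. A minimal standalone sketch of
// the new entry point follows; the helper class and its `data` parameter are
// hypothetical, everything else appears in this file.

import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse;

final class ParseSketch {
  static AggregateResponse parse(byte[] data)
      throws InvalidProtocolBufferException {
    // A single call replaces the builder round-trip. On malformed input the
    // thrown exception now carries the partially decoded message, which the
    // regenerated Builder.mergeFrom() in the hunk that follows recovers via
    // e.getUnfinishedMessage() before rethrowing.
    return AggregateResponse.PARSER.parseFrom(data);
  }
}
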
@@ -1016,68 +1435,96 @@ public final class AggregateProtos {
         this.mergeUnknownFields(other.getUnknownFields());
         return this;
       }
-
+
       public final boolean isInitialized() {
         return true;
       }
-
+
       public Builder mergeFrom(
           com.google.protobuf.CodedInputStream input,
           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
           throws java.io.IOException {
-        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder(
-            this.getUnknownFields());
-        while (true) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              this.setUnknownFields(unknownFields.build());
-              onChanged();
-              return this;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                this.setUnknownFields(unknownFields.build());
-                onChanged();
-                return this;
-              }
-              break;
-            }
-            case 10: {
-              ensureFirstPartIsMutable();
-              firstPart_.add(input.readBytes());
-              break;
-            }
-            case 18: {
-              bitField0_ |= 0x00000002;
-              secondPart_ = input.readBytes();
-              break;
-            }
+        org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
           }
         }
+        return this;
       }
-
       private int bitField0_;
-
+
       // repeated bytes firstPart = 1;
-      private java.util.List<com.google.protobuf.ByteString> firstPart_ = java.util.Collections.emptyList();;
+      private java.util.List<com.google.protobuf.ByteString> firstPart_ = java.util.Collections.emptyList();
       private void ensureFirstPartIsMutable() {
        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
          firstPart_ = new java.util.ArrayList<com.google.protobuf.ByteString>(firstPart_);
          bitField0_ |= 0x00000001;
        }
      }
+      /**
+       * repeated bytes firstPart = 1;
+       *
+       *
+       **
+       * The AggregateService methods all have a response that either is a Pair
+       * or a simple object. When it is a Pair both firstPart and secondPart
+       * have defined values (and the secondPart is not present in the response 
+       * when the response is not a pair). Refer to the AggregateImplementation 
+       * class for an overview of the AggregateResponse object constructions. 
+       * 
+ */ public java.util.List getFirstPartList() { return java.util.Collections.unmodifiableList(firstPart_); } + /** + * repeated bytes firstPart = 1; + * + *
+       **
+       * The AggregateService methods all have a response that either is a Pair
+       * or a simple object. When it is a Pair both firstPart and secondPart
+       * have defined values (and the secondPart is not present in the response 
+       * when the response is not a pair). Refer to the AggregateImplementation 
+       * class for an overview of the AggregateResponse object constructions. 
+       * 
+ */ public int getFirstPartCount() { return firstPart_.size(); } + /** + * repeated bytes firstPart = 1; + * + *
+       **
+       * The AggregateService methods all have a response that either is a Pair
+       * or a simple object. When it is a Pair both firstPart and secondPart
+       * have defined values (and the secondPart is not present in the response 
+       * when the response is not a pair). Refer to the AggregateImplementation 
+       * class for an overview of the AggregateResponse object constructions. 
+       * 
+ */ public com.google.protobuf.ByteString getFirstPart(int index) { return firstPart_.get(index); } + /** + * repeated bytes firstPart = 1; + * + *
+       **
+       * The AggregateService methods all have a response that either is a Pair
+       * or a simple object. When it is a Pair both firstPart and secondPart
+       * have defined values (and the secondPart is not present in the response 
+       * when the response is not a pair). Refer to the AggregateImplementation 
+       * class for an overview of the AggregateResponse object constructions. 
+       * 
+ */ public Builder setFirstPart( int index, com.google.protobuf.ByteString value) { if (value == null) { @@ -1088,6 +1535,18 @@ public final class AggregateProtos { onChanged(); return this; } + /** + * repeated bytes firstPart = 1; + * + *
+       **
+       * The AggregateService methods all have a response that either is a Pair
+       * or a simple object. When it is a Pair both firstPart and secondPart
+       * have defined values (and the secondPart is not present in the response 
+       * when the response is not a pair). Refer to the AggregateImplementation 
+       * class for an overview of the AggregateResponse object constructions. 
+       * 
+ */ public Builder addFirstPart(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -1097,6 +1556,18 @@ public final class AggregateProtos { onChanged(); return this; } + /** + * repeated bytes firstPart = 1; + * + *
+       **
+       * The AggregateService methods all have a response that either is a Pair
+       * or a simple object. When it is a Pair both firstPart and secondPart
+       * have defined values (and the secondPart is not present in the response 
+       * when the response is not a pair). Refer to the AggregateImplementation 
+       * class for an overview of the AggregateResponse object constructions. 
+       * 
+ */ public Builder addAllFirstPart( java.lang.Iterable values) { ensureFirstPartIsMutable(); @@ -1104,21 +1575,42 @@ public final class AggregateProtos { onChanged(); return this; } + /** + * repeated bytes firstPart = 1; + * + *
+       **
+       * The AggregateService methods all have a response that either is a Pair
+       * or a simple object. When it is a Pair both firstPart and secondPart
+       * have defined values (and the secondPart is not present in the response 
+       * when the response is not a pair). Refer to the AggregateImplementation 
+       * class for an overview of the AggregateResponse object constructions. 
+       * 
+ */ public Builder clearFirstPart() { - firstPart_ = java.util.Collections.emptyList();; + firstPart_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } - + // optional bytes secondPart = 2; private com.google.protobuf.ByteString secondPart_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes secondPart = 2; + */ public boolean hasSecondPart() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional bytes secondPart = 2; + */ public com.google.protobuf.ByteString getSecondPart() { return secondPart_; } + /** + * optional bytes secondPart = 2; + */ public Builder setSecondPart(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -1128,66 +1620,98 @@ public final class AggregateProtos { onChanged(); return this; } + /** + * optional bytes secondPart = 2; + */ public Builder clearSecondPart() { bitField0_ = (bitField0_ & ~0x00000002); secondPart_ = getDefaultInstance().getSecondPart(); onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:AggregateResponse) } - + static { defaultInstance = new AggregateResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:AggregateResponse) } - + + /** + * Protobuf service {@code AggregateService} + * + *
+   ** Refer to the AggregateImplementation class for an overview of the 
+   *  AggregateService method implementations and their functionality.
+   * 
+ */ public static abstract class AggregateService implements com.google.protobuf.Service { protected AggregateService() {} - + public interface Interface { + /** + * rpc getMax(.AggregateArgument) returns (.AggregateResponse); + */ public abstract void getMax( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, com.google.protobuf.RpcCallback done); - + + /** + * rpc getMin(.AggregateArgument) returns (.AggregateResponse); + */ public abstract void getMin( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, com.google.protobuf.RpcCallback done); - + + /** + * rpc getSum(.AggregateArgument) returns (.AggregateResponse); + */ public abstract void getSum( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, com.google.protobuf.RpcCallback done); - + + /** + * rpc getRowNum(.AggregateArgument) returns (.AggregateResponse); + */ public abstract void getRowNum( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, com.google.protobuf.RpcCallback done); - + + /** + * rpc getAvg(.AggregateArgument) returns (.AggregateResponse); + */ public abstract void getAvg( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, com.google.protobuf.RpcCallback done); - + + /** + * rpc getStd(.AggregateArgument) returns (.AggregateResponse); + */ public abstract void getStd( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, com.google.protobuf.RpcCallback done); - + + /** + * rpc getMedian(.AggregateArgument) returns (.AggregateResponse); + */ public abstract void getMedian( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, com.google.protobuf.RpcCallback done); - + } - + public static com.google.protobuf.Service newReflectiveService( final Interface impl) { return new AggregateService() { @@ -1198,7 +1722,7 @@ public final class AggregateProtos { com.google.protobuf.RpcCallback done) { impl.getMax(controller, request, done); } - + @java.lang.Override public void getMin( com.google.protobuf.RpcController controller, @@ -1206,7 +1730,7 @@ public final class AggregateProtos { com.google.protobuf.RpcCallback done) { impl.getMin(controller, request, done); } - + @java.lang.Override public void getSum( com.google.protobuf.RpcController controller, @@ -1214,7 +1738,7 @@ public final class AggregateProtos { com.google.protobuf.RpcCallback done) { impl.getSum(controller, request, done); } - + @java.lang.Override public void getRowNum( com.google.protobuf.RpcController controller, @@ -1222,7 +1746,7 @@ public final class AggregateProtos { com.google.protobuf.RpcCallback done) { impl.getRowNum(controller, request, done); } - + @java.lang.Override public void getAvg( com.google.protobuf.RpcController controller, @@ -1230,7 +1754,7 @@ public final class AggregateProtos { com.google.protobuf.RpcCallback done) { impl.getAvg(controller, request, done); } - + @java.lang.Override public void getStd( com.google.protobuf.RpcController controller, @@ -1238,7 +1762,7 @@ public final class AggregateProtos { com.google.protobuf.RpcCallback done) { impl.getStd(controller, request, done); 
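
// Editor's aside (not part of the generated patch): an end-to-end sketch of
// the generated service API above. The BlockingRpcChannel is assumed to be
// supplied by the caller; the helper class and variable names are
// hypothetical, everything else appears in this file.

import com.google.protobuf.BlockingRpcChannel;
import com.google.protobuf.ServiceException;
import org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument;
import org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse;
import org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateService;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan;

final class AggregateCallSketch {
  static AggregateResponse max(BlockingRpcChannel channel, Scan scan,
      String interpreterClassName) throws ServiceException {
    // The three-part argument described in the field comments above: the
    // ColumnInterpreter class name, the Scan, and (omitted here) any
    // interpreter-specific bytes.
    AggregateArgument arg = AggregateArgument.newBuilder()
        .setInterpreterClassName(interpreterClassName)
        .setScan(scan)
        .build();
    AggregateService.BlockingInterface stub =
        AggregateService.newBlockingStub(channel);
    // Null controller for brevity; real callers pass an RpcController. The
    // result comes back in firstPart, with secondPart set only when the
    // response is a pair.
    return stub.getMax(null, arg);
  }
}
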
} - + @java.lang.Override public void getMedian( com.google.protobuf.RpcController controller, @@ -1246,10 +1770,10 @@ public final class AggregateProtos { com.google.protobuf.RpcCallback done) { impl.getMedian(controller, request, done); } - + }; } - + public static com.google.protobuf.BlockingService newReflectiveBlockingService(final BlockingInterface impl) { return new com.google.protobuf.BlockingService() { @@ -1257,7 +1781,7 @@ public final class AggregateProtos { getDescriptorForType() { return getDescriptor(); } - + public final com.google.protobuf.Message callBlockingMethod( com.google.protobuf.Descriptors.MethodDescriptor method, com.google.protobuf.RpcController controller, @@ -1287,7 +1811,7 @@ public final class AggregateProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getRequestPrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -1315,7 +1839,7 @@ public final class AggregateProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getResponsePrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -1343,45 +1867,66 @@ public final class AggregateProtos { throw new java.lang.AssertionError("Can't get here."); } } - + }; } - + + /** + * rpc getMax(.AggregateArgument) returns (.AggregateResponse); + */ public abstract void getMax( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, com.google.protobuf.RpcCallback done); - + + /** + * rpc getMin(.AggregateArgument) returns (.AggregateResponse); + */ public abstract void getMin( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, com.google.protobuf.RpcCallback done); - + + /** + * rpc getSum(.AggregateArgument) returns (.AggregateResponse); + */ public abstract void getSum( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, com.google.protobuf.RpcCallback done); - + + /** + * rpc getRowNum(.AggregateArgument) returns (.AggregateResponse); + */ public abstract void getRowNum( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, com.google.protobuf.RpcCallback done); - + + /** + * rpc getAvg(.AggregateArgument) returns (.AggregateResponse); + */ public abstract void getAvg( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, com.google.protobuf.RpcCallback done); - + + /** + * rpc getStd(.AggregateArgument) returns (.AggregateResponse); + */ public abstract void getStd( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, com.google.protobuf.RpcCallback done); - + + /** + * rpc getMedian(.AggregateArgument) returns (.AggregateResponse); + */ public abstract void getMedian( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, com.google.protobuf.RpcCallback done); - + public static final com.google.protobuf.Descriptors.ServiceDescriptor getDescriptor() { @@ -1391,7 +1936,7 @@ public final class AggregateProtos { getDescriptorForType() { return getDescriptor(); } - + public final void callMethod( 
com.google.protobuf.Descriptors.MethodDescriptor method, com.google.protobuf.RpcController controller, @@ -1443,7 +1988,7 @@ public final class AggregateProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getRequestPrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -1471,7 +2016,7 @@ public final class AggregateProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getResponsePrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -1499,23 +2044,23 @@ public final class AggregateProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public static Stub newStub( com.google.protobuf.RpcChannel channel) { return new Stub(channel); } - + public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateService implements Interface { private Stub(com.google.protobuf.RpcChannel channel) { this.channel = channel; } - + private final com.google.protobuf.RpcChannel channel; - + public com.google.protobuf.RpcChannel getChannel() { return channel; } - + public void getMax( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, @@ -1530,7 +2075,7 @@ public final class AggregateProtos { org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance())); } - + public void getMin( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, @@ -1545,7 +2090,7 @@ public final class AggregateProtos { org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance())); } - + public void getSum( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, @@ -1560,7 +2105,7 @@ public final class AggregateProtos { org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance())); } - + public void getRowNum( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, @@ -1575,7 +2120,7 @@ public final class AggregateProtos { org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance())); } - + public void getAvg( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, @@ -1590,7 +2135,7 @@ public final class AggregateProtos { org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance())); } - + public void getStd( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, @@ -1605,7 +2150,7 @@ public final class AggregateProtos { org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class, 
org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance())); } - + public void getMedian( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, @@ -1621,56 +2166,56 @@ public final class AggregateProtos { org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance())); } } - + public static BlockingInterface newBlockingStub( com.google.protobuf.BlockingRpcChannel channel) { return new BlockingStub(channel); } - + public interface BlockingInterface { public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getMax( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getMin( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getSum( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getRowNum( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getAvg( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getStd( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getMedian( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request) throws com.google.protobuf.ServiceException; } - + private static final class BlockingStub implements BlockingInterface { private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { this.channel = channel; } - + private final com.google.protobuf.BlockingRpcChannel channel; - + public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getMax( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request) @@ -1681,8 +2226,8 @@ public final class AggregateProtos { request, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getMin( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request) @@ -1693,8 +2238,8 @@ public final class AggregateProtos { request, 
org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getSum( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request) @@ -1705,8 +2250,8 @@ public final class AggregateProtos { request, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getRowNum( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request) @@ -1717,8 +2262,8 @@ public final class AggregateProtos { request, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getAvg( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request) @@ -1729,8 +2274,8 @@ public final class AggregateProtos { request, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getStd( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request) @@ -1741,8 +2286,8 @@ public final class AggregateProtos { request, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getMedian( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request) @@ -1753,10 +2298,12 @@ public final class AggregateProtos { request, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance()); } - + } + + // @@protoc_insertion_point(class_scope:AggregateService) } - + private static com.google.protobuf.Descriptors.Descriptor internal_static_AggregateArgument_descriptor; private static @@ -1767,7 +2314,7 @@ public final class AggregateProtos { private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_AggregateResponse_fieldAccessorTable; - + public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; @@ -1804,17 +2351,13 @@ public final class AggregateProtos { internal_static_AggregateArgument_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_AggregateArgument_descriptor, - new java.lang.String[] { "InterpreterClassName", "Scan", "InterpreterSpecificBytes", }, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.class, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.Builder.class); + new java.lang.String[] { "InterpreterClassName", "Scan", "InterpreterSpecificBytes", }); internal_static_AggregateResponse_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_AggregateResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_AggregateResponse_descriptor, - new java.lang.String[] { "FirstPart", "SecondPart", }, - 
org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.Builder.class); + new java.lang.String[] { "FirstPart", "SecondPart", }); return null; } }; @@ -1824,6 +2367,6 @@ public final class AggregateProtos { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor(), }, assigner); } - + // @@protoc_insertion_point(outer_class_scope) } diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AuthenticationProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AuthenticationProtos.java index 5a05473..07b1bc4 100644 --- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AuthenticationProtos.java +++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AuthenticationProtos.java @@ -10,78 +10,191 @@ public final class AuthenticationProtos { } public interface AuthenticationKeyOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required int32 id = 1; + /** + * required int32 id = 1; + */ boolean hasId(); + /** + * required int32 id = 1; + */ int getId(); - + // required int64 expirationDate = 2; + /** + * required int64 expirationDate = 2; + */ boolean hasExpirationDate(); + /** + * required int64 expirationDate = 2; + */ long getExpirationDate(); - + // required bytes key = 3; + /** + * required bytes key = 3; + */ boolean hasKey(); + /** + * required bytes key = 3; + */ com.google.protobuf.ByteString getKey(); } + /** + * Protobuf type {@code AuthenticationKey} + */ public static final class AuthenticationKey extends com.google.protobuf.GeneratedMessage implements AuthenticationKeyOrBuilder { // Use AuthenticationKey.newBuilder() to construct. 
- private AuthenticationKey(Builder builder) { + private AuthenticationKey(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private AuthenticationKey(boolean noInit) {} - + private AuthenticationKey(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final AuthenticationKey defaultInstance; public static AuthenticationKey getDefaultInstance() { return defaultInstance; } - + public AuthenticationKey getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private AuthenticationKey( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + id_ = input.readInt32(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + expirationDate_ = input.readInt64(); + break; + } + case 26: { + bitField0_ |= 0x00000004; + key_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_AuthenticationKey_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_AuthenticationKey_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_AuthenticationKey_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.AuthenticationKey.class, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.AuthenticationKey.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public AuthenticationKey parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new AuthenticationKey(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required int32 id = 1; public static final int ID_FIELD_NUMBER = 1; private int id_; + /** + * required int32 id = 1; + */ public boolean hasId() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required int32 id = 1; 
+ */ public int getId() { return id_; } - + // required int64 expirationDate = 2; public static final int EXPIRATIONDATE_FIELD_NUMBER = 2; private long expirationDate_; + /** + * required int64 expirationDate = 2; + */ public boolean hasExpirationDate() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required int64 expirationDate = 2; + */ public long getExpirationDate() { return expirationDate_; } - + // required bytes key = 3; public static final int KEY_FIELD_NUMBER = 3; private com.google.protobuf.ByteString key_; + /** + * required bytes key = 3; + */ public boolean hasKey() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * required bytes key = 3; + */ public com.google.protobuf.ByteString getKey() { return key_; } - + private void initFields() { id_ = 0; expirationDate_ = 0L; @@ -91,7 +204,7 @@ public final class AuthenticationProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasId()) { memoizedIsInitialized = 0; return false; @@ -107,7 +220,7 @@ public final class AuthenticationProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -122,12 +235,12 @@ public final class AuthenticationProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -145,14 +258,14 @@ public final class AuthenticationProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -162,7 +275,7 @@ public final class AuthenticationProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.AuthenticationKey other = (org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.AuthenticationKey) obj; - + boolean result = true; result = result && (hasId() == other.hasId()); if (hasId()) { @@ -183,9 +296,13 @@ public final class AuthenticationProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasId()) { @@ -201,89 +318,79 @@ public final class AuthenticationProtos { hash = (53 * hash) + getKey().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.AuthenticationKey parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.AuthenticationKey parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return 
newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.AuthenticationKey parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.AuthenticationKey parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.AuthenticationKey parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.AuthenticationKey parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.AuthenticationKey parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.AuthenticationKey parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.AuthenticationKey parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.AuthenticationKey parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.AuthenticationKey prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code AuthenticationKey} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements 
org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.AuthenticationKeyOrBuilder { @@ -291,18 +398,21 @@ public final class AuthenticationProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_AuthenticationKey_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_AuthenticationKey_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_AuthenticationKey_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.AuthenticationKey.class, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.AuthenticationKey.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.AuthenticationKey.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -313,7 +423,7 @@ public final class AuthenticationProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); id_ = 0; @@ -324,20 +434,20 @@ public final class AuthenticationProtos { bitField0_ = (bitField0_ & ~0x00000004); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.AuthenticationKey.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_AuthenticationKey_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.AuthenticationKey getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.AuthenticationKey.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.AuthenticationKey build() { org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.AuthenticationKey result = buildPartial(); if (!result.isInitialized()) { @@ -345,17 +455,7 @@ public final class AuthenticationProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.AuthenticationKey buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.AuthenticationKey result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.AuthenticationKey buildPartial() { org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.AuthenticationKey result = new org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.AuthenticationKey(this); int from_bitField0_ = bitField0_; @@ -376,7 +476,7 @@ public final class AuthenticationProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.AuthenticationKey) { return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.AuthenticationKey)other); @@ -385,7 +485,7 @@ public final class AuthenticationProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.AuthenticationKey other) { if (other == org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.AuthenticationKey.getDefaultInstance()) return this; if (other.hasId()) { @@ -400,7 +500,7 @@ public final class AuthenticationProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasId()) { @@ -416,101 +516,109 @@ public final class AuthenticationProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - id_ = input.readInt32(); - break; - } - case 16: { - bitField0_ |= 0x00000002; - expirationDate_ = input.readInt64(); - break; - } - case 26: { - bitField0_ |= 0x00000004; - key_ = input.readBytes(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.AuthenticationKey parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.AuthenticationKey) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required int32 id = 1; private int id_ ; + /** + * required int32 id = 1; + */ public boolean hasId() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required int32 id = 1; + */ public int getId() { return id_; } + /** + * required int32 id = 1; + */ public Builder setId(int value) { bitField0_ |= 0x00000001; id_ = value; onChanged(); return this; } + /** + * required int32 id = 1; + */ public Builder clearId() { bitField0_ = (bitField0_ & ~0x00000001); id_ = 0; onChanged(); return this; } - + // required int64 expirationDate = 2; private long expirationDate_ ; + /** + * required int64 expirationDate = 2; + */ public boolean hasExpirationDate() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required int64 expirationDate = 2; + */ public long getExpirationDate() { return expirationDate_; } + /** + * required int64 expirationDate = 2; + */ public Builder setExpirationDate(long value) { bitField0_ |= 0x00000002; expirationDate_ = value; onChanged(); return this; } + /** + * required int64 expirationDate = 2; + */ public Builder clearExpirationDate() { bitField0_ = (bitField0_ & ~0x00000002); expirationDate_ = 0L; onChanged(); return this; } - + // required bytes key = 3; private com.google.protobuf.ByteString key_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes key = 3; + */ public boolean hasKey() { return ((bitField0_ & 0x00000004) == 0x00000004); } + 
/** + * required bytes key = 3; + */ public com.google.protobuf.ByteString getKey() { return key_; } + /** + * required bytes key = 3; + */ public Builder setKey(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -520,96 +628,242 @@ public final class AuthenticationProtos { onChanged(); return this; } + /** + * required bytes key = 3; + */ public Builder clearKey() { bitField0_ = (bitField0_ & ~0x00000004); key_ = getDefaultInstance().getKey(); onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:AuthenticationKey) } - + static { defaultInstance = new AuthenticationKey(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:AuthenticationKey) } - + public interface TokenIdentifierOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .TokenIdentifier.Kind kind = 1; + /** + * required .TokenIdentifier.Kind kind = 1; + */ boolean hasKind(); + /** + * required .TokenIdentifier.Kind kind = 1; + */ org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier.Kind getKind(); - + // required bytes username = 2; + /** + * required bytes username = 2; + */ boolean hasUsername(); + /** + * required bytes username = 2; + */ com.google.protobuf.ByteString getUsername(); - + // required int32 keyId = 3; + /** + * required int32 keyId = 3; + */ boolean hasKeyId(); + /** + * required int32 keyId = 3; + */ int getKeyId(); - + // optional int64 issueDate = 4; + /** + * optional int64 issueDate = 4; + */ boolean hasIssueDate(); + /** + * optional int64 issueDate = 4; + */ long getIssueDate(); - + // optional int64 expirationDate = 5; + /** + * optional int64 expirationDate = 5; + */ boolean hasExpirationDate(); + /** + * optional int64 expirationDate = 5; + */ long getExpirationDate(); - + // optional int64 sequenceNumber = 6; + /** + * optional int64 sequenceNumber = 6; + */ boolean hasSequenceNumber(); + /** + * optional int64 sequenceNumber = 6; + */ long getSequenceNumber(); } + /** + * Protobuf type {@code TokenIdentifier} + */ public static final class TokenIdentifier extends com.google.protobuf.GeneratedMessage implements TokenIdentifierOrBuilder { // Use TokenIdentifier.newBuilder() to construct. 
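TokenIdentifier regenerates along the same lines; its new parsing constructor additionally shows how an unrecognized wire value for the kind enum is routed into unknownFields (mergeVarintField) instead of failing the parse. A hedged construction and round-trip sketch; the principal name is a placeholder:

    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier;

    // kind, username and keyId are required; issueDate, expirationDate and
    // sequenceNumber are optional.
    TokenIdentifier ident = TokenIdentifier.newBuilder()
        .setKind(TokenIdentifier.Kind.HBASE_AUTH_TOKEN) // the only Kind value defined
        .setUsername(ByteString.copyFromUtf8("placeholder-user"))
        .setKeyId(1)
        .setIssueDate(System.currentTimeMillis())
        .build();
    TokenIdentifier roundTrip = TokenIdentifier.parseFrom(ident.toByteArray());

parseDelimitedFrom keeps its old null-on-EOF contract as well, since AbstractParser.parseDelimitedFrom likewise returns null when the stream is already exhausted.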
- private TokenIdentifier(Builder builder) { + private TokenIdentifier(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private TokenIdentifier(boolean noInit) {} - + private TokenIdentifier(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final TokenIdentifier defaultInstance; public static TokenIdentifier getDefaultInstance() { return defaultInstance; } - + public TokenIdentifier getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private TokenIdentifier( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier.Kind value = org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier.Kind.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(1, rawValue); + } else { + bitField0_ |= 0x00000001; + kind_ = value; + } + break; + } + case 18: { + bitField0_ |= 0x00000002; + username_ = input.readBytes(); + break; + } + case 24: { + bitField0_ |= 0x00000004; + keyId_ = input.readInt32(); + break; + } + case 32: { + bitField0_ |= 0x00000008; + issueDate_ = input.readInt64(); + break; + } + case 40: { + bitField0_ |= 0x00000010; + expirationDate_ = input.readInt64(); + break; + } + case 48: { + bitField0_ |= 0x00000020; + sequenceNumber_ = input.readInt64(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_TokenIdentifier_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_TokenIdentifier_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_TokenIdentifier_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier.class, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public TokenIdentifier parsePartialFrom( + com.google.protobuf.CodedInputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new TokenIdentifier(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + + /** + * Protobuf enum {@code TokenIdentifier.Kind} + */ public enum Kind implements com.google.protobuf.ProtocolMessageEnum { + /** + * HBASE_AUTH_TOKEN = 0; + */ HBASE_AUTH_TOKEN(0, 0), ; - + + /** + * HBASE_AUTH_TOKEN = 0; + */ public static final int HBASE_AUTH_TOKEN_VALUE = 0; - - + + public final int getNumber() { return value; } - + public static Kind valueOf(int value) { switch (value) { case 0: return HBASE_AUTH_TOKEN; default: return null; } } - + public static com.google.protobuf.Internal.EnumLiteMap internalGetValueMap() { return internalValueMap; @@ -621,7 +875,7 @@ public final class AuthenticationProtos { return Kind.valueOf(number); } }; - + public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(index); @@ -634,11 +888,9 @@ public final class AuthenticationProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier.getDescriptor().getEnumTypes().get(0); } - - private static final Kind[] VALUES = { - HBASE_AUTH_TOKEN, - }; - + + private static final Kind[] VALUES = values(); + public static Kind valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { @@ -647,79 +899,115 @@ public final class AuthenticationProtos { } return VALUES[desc.getIndex()]; } - + private final int index; private final int value; - + private Kind(int index, int value) { this.index = index; this.value = value; } - + // @@protoc_insertion_point(enum_scope:TokenIdentifier.Kind) } - + private int bitField0_; // required .TokenIdentifier.Kind kind = 1; public static final int KIND_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier.Kind kind_; + /** + * required .TokenIdentifier.Kind kind = 1; + */ public boolean hasKind() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .TokenIdentifier.Kind kind = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier.Kind getKind() { return kind_; } - + // required bytes username = 2; public static final int USERNAME_FIELD_NUMBER = 2; private com.google.protobuf.ByteString username_; + /** + * required bytes username = 2; + */ public boolean hasUsername() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required bytes username = 2; + */ public com.google.protobuf.ByteString getUsername() { return username_; } - + // required int32 keyId = 3; public static final int KEYID_FIELD_NUMBER = 3; private int keyId_; + /** + * required int32 keyId = 3; + */ public boolean hasKeyId() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * required int32 keyId = 3; + */ public int getKeyId() { return keyId_; } - + // optional int64 issueDate = 4; public static final int ISSUEDATE_FIELD_NUMBER = 4; private long issueDate_; + /** + * optional int64 issueDate = 4; + */ public boolean hasIssueDate() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * optional int64 issueDate = 4; + */ public long getIssueDate() { return issueDate_; } - + // optional int64 expirationDate = 5; public static final int EXPIRATIONDATE_FIELD_NUMBER = 5; private long expirationDate_; + /** + * 
optional int64 expirationDate = 5; + */ public boolean hasExpirationDate() { return ((bitField0_ & 0x00000010) == 0x00000010); } + /** + * optional int64 expirationDate = 5; + */ public long getExpirationDate() { return expirationDate_; } - + // optional int64 sequenceNumber = 6; public static final int SEQUENCENUMBER_FIELD_NUMBER = 6; private long sequenceNumber_; + /** + * optional int64 sequenceNumber = 6; + */ public boolean hasSequenceNumber() { return ((bitField0_ & 0x00000020) == 0x00000020); } + /** + * optional int64 sequenceNumber = 6; + */ public long getSequenceNumber() { return sequenceNumber_; } - + private void initFields() { kind_ = org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier.Kind.HBASE_AUTH_TOKEN; username_ = com.google.protobuf.ByteString.EMPTY; @@ -732,7 +1020,7 @@ public final class AuthenticationProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasKind()) { memoizedIsInitialized = 0; return false; @@ -748,7 +1036,7 @@ public final class AuthenticationProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -772,12 +1060,12 @@ public final class AuthenticationProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -807,14 +1095,14 @@ public final class AuthenticationProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -824,7 +1112,7 @@ public final class AuthenticationProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier other = (org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier) obj; - + boolean result = true; result = result && (hasKind() == other.hasKind()); if (hasKind()) { @@ -860,9 +1148,13 @@ public final class AuthenticationProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasKind()) { @@ -890,89 +1182,79 @@ public final class AuthenticationProtos { hash = (53 * hash) + hashLong(getSequenceNumber()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return 
newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code TokenIdentifier} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements 
org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifierOrBuilder { @@ -980,18 +1262,21 @@ public final class AuthenticationProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_TokenIdentifier_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_TokenIdentifier_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_TokenIdentifier_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier.class, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -1002,7 +1287,7 @@ public final class AuthenticationProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); kind_ = org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier.Kind.HBASE_AUTH_TOKEN; @@ -1019,20 +1304,20 @@ public final class AuthenticationProtos { bitField0_ = (bitField0_ & ~0x00000020); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_TokenIdentifier_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier build() { org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier result = buildPartial(); if (!result.isInitialized()) { @@ -1040,17 +1325,7 @@ public final class AuthenticationProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier buildPartial() { org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier result = new org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier(this); int from_bitField0_ = bitField0_; @@ -1083,7 +1358,7 @@ public final class AuthenticationProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier) { 
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier)other); @@ -1092,7 +1367,7 @@ public final class AuthenticationProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier other) { if (other == org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier.getDefaultInstance()) return this; if (other.hasKind()) { @@ -1116,7 +1391,7 @@ public final class AuthenticationProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasKind()) { @@ -1132,80 +1407,43 @@ public final class AuthenticationProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier.Kind value = org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier.Kind.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(1, rawValue); - } else { - bitField0_ |= 0x00000001; - kind_ = value; - } - break; - } - case 18: { - bitField0_ |= 0x00000002; - username_ = input.readBytes(); - break; - } - case 24: { - bitField0_ |= 0x00000004; - keyId_ = input.readInt32(); - break; - } - case 32: { - bitField0_ |= 0x00000008; - issueDate_ = input.readInt64(); - break; - } - case 40: { - bitField0_ |= 0x00000010; - expirationDate_ = input.readInt64(); - break; - } - case 48: { - bitField0_ |= 0x00000020; - sequenceNumber_ = input.readInt64(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .TokenIdentifier.Kind kind = 1; private org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier.Kind kind_ = org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier.Kind.HBASE_AUTH_TOKEN; + /** + * required .TokenIdentifier.Kind kind = 1; + */ public boolean hasKind() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .TokenIdentifier.Kind kind = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier.Kind getKind() { return kind_; } + /** + * required .TokenIdentifier.Kind kind = 1; + */ public Builder setKind(org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier.Kind value) { if (value == null) { throw new NullPointerException(); @@ -1215,21 +1453,33 @@ public final class 
AuthenticationProtos { onChanged(); return this; } + /** + * required .TokenIdentifier.Kind kind = 1; + */ public Builder clearKind() { bitField0_ = (bitField0_ & ~0x00000001); kind_ = org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier.Kind.HBASE_AUTH_TOKEN; onChanged(); return this; } - + // required bytes username = 2; private com.google.protobuf.ByteString username_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes username = 2; + */ public boolean hasUsername() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required bytes username = 2; + */ public com.google.protobuf.ByteString getUsername() { return username_; } + /** + * required bytes username = 2; + */ public Builder setUsername(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -1239,182 +1489,375 @@ public final class AuthenticationProtos { onChanged(); return this; } + /** + * required bytes username = 2; + */ public Builder clearUsername() { bitField0_ = (bitField0_ & ~0x00000002); username_ = getDefaultInstance().getUsername(); onChanged(); return this; } - + // required int32 keyId = 3; private int keyId_ ; + /** + * required int32 keyId = 3; + */ public boolean hasKeyId() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * required int32 keyId = 3; + */ public int getKeyId() { return keyId_; } + /** + * required int32 keyId = 3; + */ public Builder setKeyId(int value) { bitField0_ |= 0x00000004; keyId_ = value; onChanged(); return this; } + /** + * required int32 keyId = 3; + */ public Builder clearKeyId() { bitField0_ = (bitField0_ & ~0x00000004); keyId_ = 0; onChanged(); return this; } - + // optional int64 issueDate = 4; private long issueDate_ ; + /** + * optional int64 issueDate = 4; + */ public boolean hasIssueDate() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * optional int64 issueDate = 4; + */ public long getIssueDate() { return issueDate_; } + /** + * optional int64 issueDate = 4; + */ public Builder setIssueDate(long value) { bitField0_ |= 0x00000008; issueDate_ = value; onChanged(); return this; } + /** + * optional int64 issueDate = 4; + */ public Builder clearIssueDate() { bitField0_ = (bitField0_ & ~0x00000008); issueDate_ = 0L; onChanged(); return this; } - + // optional int64 expirationDate = 5; private long expirationDate_ ; + /** + * optional int64 expirationDate = 5; + */ public boolean hasExpirationDate() { return ((bitField0_ & 0x00000010) == 0x00000010); } + /** + * optional int64 expirationDate = 5; + */ public long getExpirationDate() { return expirationDate_; } + /** + * optional int64 expirationDate = 5; + */ public Builder setExpirationDate(long value) { bitField0_ |= 0x00000010; expirationDate_ = value; onChanged(); return this; } + /** + * optional int64 expirationDate = 5; + */ public Builder clearExpirationDate() { bitField0_ = (bitField0_ & ~0x00000010); expirationDate_ = 0L; onChanged(); return this; } - + // optional int64 sequenceNumber = 6; private long sequenceNumber_ ; + /** + * optional int64 sequenceNumber = 6; + */ public boolean hasSequenceNumber() { return ((bitField0_ & 0x00000020) == 0x00000020); } + /** + * optional int64 sequenceNumber = 6; + */ public long getSequenceNumber() { return sequenceNumber_; } + /** + * optional int64 sequenceNumber = 6; + */ public Builder setSequenceNumber(long value) { bitField0_ |= 0x00000020; sequenceNumber_ = value; onChanged(); return this; } + /** + * optional int64 sequenceNumber = 6; + */ public Builder 
clearSequenceNumber() { bitField0_ = (bitField0_ & ~0x00000020); sequenceNumber_ = 0L; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:TokenIdentifier) } - + static { defaultInstance = new TokenIdentifier(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:TokenIdentifier) } - + public interface TokenOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // optional bytes identifier = 1; + /** + * optional bytes identifier = 1; + * + *
+     * <pre>
+     * the TokenIdentifier in serialized form
+     * Note: we can't use the protobuf directly because the Hadoop Token class
+     * only stores the serialized bytes
+     * </pre>
+ */ boolean hasIdentifier(); + /** + * optional bytes identifier = 1; + * + *
+     * <pre>
+     * the TokenIdentifier in serialized form
+     * Note: we can't use the protobuf directly because the Hadoop Token class
+     * only stores the serialized bytes
+     * </pre>
+ */ com.google.protobuf.ByteString getIdentifier(); - + // optional bytes password = 2; + /** + * optional bytes password = 2; + */ boolean hasPassword(); + /** + * optional bytes password = 2; + */ com.google.protobuf.ByteString getPassword(); - + // optional bytes service = 3; + /** + * optional bytes service = 3; + */ boolean hasService(); + /** + * optional bytes service = 3; + */ com.google.protobuf.ByteString getService(); } + /** + * Protobuf type {@code Token} + * + *
+   * <pre>
+   * Serialization of the org.apache.hadoop.security.token.Token class
+   * Note that this is a Hadoop class, so fields may change!
+   * </pre>
+ */ public static final class Token extends com.google.protobuf.GeneratedMessage implements TokenOrBuilder { // Use Token.newBuilder() to construct. - private Token(Builder builder) { + private Token(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private Token(boolean noInit) {} - + private Token(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final Token defaultInstance; public static Token getDefaultInstance() { return defaultInstance; } - + public Token getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private Token( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + identifier_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + password_ = input.readBytes(); + break; + } + case 26: { + bitField0_ |= 0x00000004; + service_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_Token_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_Token_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_Token_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token.class, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public Token parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new Token(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // optional bytes identifier = 1; public static final int IDENTIFIER_FIELD_NUMBER = 1; private com.google.protobuf.ByteString identifier_; + /** + * optional bytes identifier = 1; + * + *
+     * <pre>
+     * the TokenIdentifier in serialized form
+     * Note: we can't use the protobuf directly because the Hadoop Token class
+     * only stores the serialized bytes
+     * </pre>
+ */ public boolean hasIdentifier() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional bytes identifier = 1; + * + *
+     * <pre>
+     * the TokenIdentifier in serialized form
+     * Note: we can't use the protobuf directly because the Hadoop Token class
+     * only stores the serialized bytes
+     * </pre>
+ */ public com.google.protobuf.ByteString getIdentifier() { return identifier_; } - + // optional bytes password = 2; public static final int PASSWORD_FIELD_NUMBER = 2; private com.google.protobuf.ByteString password_; + /** + * optional bytes password = 2; + */ public boolean hasPassword() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional bytes password = 2; + */ public com.google.protobuf.ByteString getPassword() { return password_; } - + // optional bytes service = 3; public static final int SERVICE_FIELD_NUMBER = 3; private com.google.protobuf.ByteString service_; + /** + * optional bytes service = 3; + */ public boolean hasService() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional bytes service = 3; + */ public com.google.protobuf.ByteString getService() { return service_; } - + private void initFields() { identifier_ = com.google.protobuf.ByteString.EMPTY; password_ = com.google.protobuf.ByteString.EMPTY; @@ -1424,11 +1867,11 @@ public final class AuthenticationProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -1443,12 +1886,12 @@ public final class AuthenticationProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -1466,14 +1909,14 @@ public final class AuthenticationProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -1483,7 +1926,7 @@ public final class AuthenticationProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token other = (org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token) obj; - + boolean result = true; result = result && (hasIdentifier() == other.hasIdentifier()); if (hasIdentifier()) { @@ -1504,9 +1947,13 @@ public final class AuthenticationProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasIdentifier()) { @@ -1522,89 +1969,84 @@ public final class AuthenticationProtos { hash = (53 * hash) + getService().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code Token} + * + *
+     * Serialization of the org.apache.hadoop.security.token.Token class
+     * Note that this is a Hadoop class, so fields may change!
+     * </pre>
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenOrBuilder { @@ -1612,18 +2054,21 @@ public final class AuthenticationProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_Token_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_Token_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_Token_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token.class, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -1634,7 +2079,7 @@ public final class AuthenticationProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); identifier_ = com.google.protobuf.ByteString.EMPTY; @@ -1645,20 +2090,20 @@ public final class AuthenticationProtos { bitField0_ = (bitField0_ & ~0x00000004); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_Token_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token build() { org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token result = buildPartial(); if (!result.isInitialized()) { @@ -1666,17 +2111,7 @@ public final class AuthenticationProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token buildPartial() { org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token result = new org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token(this); int from_bitField0_ = bitField0_; @@ -1697,7 +2132,7 @@ public final class AuthenticationProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token)other); @@ -1706,7 +2141,7 @@ public final class 
AuthenticationProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token other) { if (other == org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token.getDefaultInstance()) return this; if (other.hasIdentifier()) { @@ -1721,63 +2156,65 @@ public final class AuthenticationProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - identifier_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - password_ = input.readBytes(); - break; - } - case 26: { - bitField0_ |= 0x00000004; - service_ = input.readBytes(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // optional bytes identifier = 1; private com.google.protobuf.ByteString identifier_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes identifier = 1; + * + *
+       * the TokenIdentifier in serialized form
+       * Note: we can't use the protobuf directly because the Hadoop Token class
+       * only stores the serialized bytes
+       * </pre>
+ */ public boolean hasIdentifier() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional bytes identifier = 1; + * + *
+       * the TokenIdentifier in serialized form
+       * Note: we can't use the protobuf directly because the Hadoop Token class
+       * only stores the serialized bytes
+       * </pre>
+ */ public com.google.protobuf.ByteString getIdentifier() { return identifier_; } + /** + * optional bytes identifier = 1; + * + *
+       * the TokenIdentifier in serialized form
+       * Note: we can't use the protobuf directly because the Hadoop Token class
+       * only stores the serialized bytes
+       * </pre>
+ */ public Builder setIdentifier(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -1787,21 +2224,39 @@ public final class AuthenticationProtos { onChanged(); return this; } + /** + * optional bytes identifier = 1; + * + *
+       * the TokenIdentifier in serialized form
+       * Note: we can't use the protobuf directly because the Hadoop Token class
+       * only stores the serialized bytes
+       * </pre>
+ */ public Builder clearIdentifier() { bitField0_ = (bitField0_ & ~0x00000001); identifier_ = getDefaultInstance().getIdentifier(); onChanged(); return this; } - + // optional bytes password = 2; private com.google.protobuf.ByteString password_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes password = 2; + */ public boolean hasPassword() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional bytes password = 2; + */ public com.google.protobuf.ByteString getPassword() { return password_; } + /** + * optional bytes password = 2; + */ public Builder setPassword(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -1811,21 +2266,33 @@ public final class AuthenticationProtos { onChanged(); return this; } + /** + * optional bytes password = 2; + */ public Builder clearPassword() { bitField0_ = (bitField0_ & ~0x00000002); password_ = getDefaultInstance().getPassword(); onChanged(); return this; } - + // optional bytes service = 3; private com.google.protobuf.ByteString service_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes service = 3; + */ public boolean hasService() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional bytes service = 3; + */ public com.google.protobuf.ByteString getService() { return service_; } + /** + * optional bytes service = 3; + */ public Builder setService(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -1835,90 +2302,158 @@ public final class AuthenticationProtos { onChanged(); return this; } + /** + * optional bytes service = 3; + */ public Builder clearService() { bitField0_ = (bitField0_ & ~0x00000004); service_ = getDefaultInstance().getService(); onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:Token) } - + static { defaultInstance = new Token(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:Token) } - + public interface TokenRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code TokenRequest} + * + *
+   * RPC request &amp; response messages
+   * </pre>
+ */ public static final class TokenRequest extends com.google.protobuf.GeneratedMessage implements TokenRequestOrBuilder { // Use TokenRequest.newBuilder() to construct. - private TokenRequest(Builder builder) { + private TokenRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private TokenRequest(boolean noInit) {} - + private TokenRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final TokenRequest defaultInstance; public static TokenRequest getDefaultInstance() { return defaultInstance; } - + public TokenRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private TokenRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_TokenRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_TokenRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_TokenRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest.class, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest.Builder.class); } - + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public TokenRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new TokenRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize 
= -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -1928,101 +2463,99 @@ public final class AuthenticationProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest other = (org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest) obj; - + boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest parseDelimitedFrom( 
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code TokenRequest} + * + *
+     * RPC request &amp; response messages
+     * </pre>
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequestOrBuilder { @@ -2030,18 +2563,21 @@ public final class AuthenticationProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_TokenRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_TokenRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_TokenRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest.class, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -2052,25 +2588,25 @@ public final class AuthenticationProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_TokenRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest build() { org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest result = buildPartial(); if (!result.isInitialized()) { @@ -2078,23 +2614,13 @@ public final class AuthenticationProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest result = new org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest(this); onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest)other); @@ -2103,106 +2629,189 @@ public final class AuthenticationProtos { return this; } } - + public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } + org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - - + // @@protoc_insertion_point(builder_scope:TokenRequest) } - + static { defaultInstance = new TokenRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:TokenRequest) } - + public interface TokenResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // optional .Token token = 1; + /** + * optional .Token token = 1; + */ boolean hasToken(); + /** + * optional .Token token = 1; + */ org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token getToken(); + /** + * optional .Token token = 1; + */ org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenOrBuilder getTokenOrBuilder(); } + /** + * Protobuf type {@code TokenResponse} + */ public static final class TokenResponse extends com.google.protobuf.GeneratedMessage implements TokenResponseOrBuilder { // Use TokenResponse.newBuilder() to construct. 
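The same two-step migration repeats for every message in this file: the hand-rolled mergeFrom(CodedInputStream) tag loop and the buildParsed() helper are deleted, and both the static parseFrom overloads and Builder.mergeFrom now delegate to the message's generated PARSER. A minimal caller-side sketch of the before/after; the demo class name and sample bytes are illustrative, not part of the patch:

import com.google.protobuf.ByteString;
import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token;

public final class TokenParseDemo {
  public static Token roundTrip() throws InvalidProtocolBufferException {
    Token original = Token.newBuilder()
        .setIdentifier(ByteString.copyFromUtf8("ident"))
        .setPassword(ByteString.copyFromUtf8("secret"))
        .build();
    byte[] wire = original.toByteArray();
    // protobuf 2.4 spelled this Token.newBuilder().mergeFrom(wire).buildParsed();
    // protobuf 2.5 reaches the same result through the generated parser:
    return Token.PARSER.parseFrom(wire);
  }
}

PARSER.parseDelimitedFrom(InputStream) also preserves the old contract of returning null at end-of-stream, which is why the mergeDelimitedFrom(...) ? buildParsed() : null branches above could be dropped without changing caller behavior.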
- private TokenResponse(Builder builder) { + private TokenResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private TokenResponse(boolean noInit) {} - + private TokenResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final TokenResponse defaultInstance; public static TokenResponse getDefaultInstance() { return defaultInstance; } - + public TokenResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private TokenResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = token_.toBuilder(); + } + token_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(token_); + token_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_TokenResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_TokenResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_TokenResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse.class, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public TokenResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new TokenResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // optional .Token token = 1; public static final int TOKEN_FIELD_NUMBER = 1; 
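The new TokenResponse constructor above parses eagerly off the CodedInputStream; its case 10 arm is field 1 of the message, because a protobuf wire tag packs the field number and wire type into one varint. A small sketch of that arithmetic (the demo class is an assumption; the WireFormat constant is part of the protobuf runtime):

import com.google.protobuf.WireFormat;

public final class TagDemo {
  public static void main(String[] args) {
    // tag = (field number << 3) | wire type; wire type 2 is length-delimited,
    // which covers bytes, strings and embedded messages alike.
    int tokenTag    = (1 << 3) | WireFormat.WIRETYPE_LENGTH_DELIMITED; // 10: TokenResponse.token
    int passwordTag = (2 << 3) | WireFormat.WIRETYPE_LENGTH_DELIMITED; // 18: Token.password
    int serviceTag  = (3 << 3) | WireFormat.WIRETYPE_LENGTH_DELIMITED; // 26: Token.service
    System.out.println(tokenTag + " " + passwordTag + " " + serviceTag);
  }
}

The subBuilder dance in case 10 keeps merge semantics when a message field appears more than once on the wire: a later occurrence merges into, rather than replaces, the earlier value.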
private org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token token_; + /** + * optional .Token token = 1; + */ public boolean hasToken() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional .Token token = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token getToken() { return token_; } + /** + * optional .Token token = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenOrBuilder getTokenOrBuilder() { return token_; } - + private void initFields() { token_ = org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token.getDefaultInstance(); } @@ -2210,11 +2819,11 @@ public final class AuthenticationProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -2223,12 +2832,12 @@ public final class AuthenticationProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -2238,14 +2847,14 @@ public final class AuthenticationProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -2255,7 +2864,7 @@ public final class AuthenticationProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse other = (org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse) obj; - + boolean result = true; result = result && (hasToken() == other.hasToken()); if (hasToken()) { @@ -2266,9 +2875,13 @@ public final class AuthenticationProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasToken()) { @@ -2276,89 +2889,79 @@ public final class AuthenticationProtos { hash = (53 * hash) + getToken().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse parseFrom(byte[] data) throws 
com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code TokenResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponseOrBuilder { @@ -2366,18 +2969,21 @@ public final class AuthenticationProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_TokenResponse_descriptor; } - + protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_TokenResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_TokenResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse.class, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -2389,7 +2995,7 @@ public final class AuthenticationProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (tokenBuilder_ == null) { @@ -2400,20 +3006,20 @@ public final class AuthenticationProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_TokenResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse build() { org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse result = buildPartial(); if (!result.isInitialized()) { @@ -2421,17 +3027,7 @@ public final class AuthenticationProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse result = new org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse(this); int from_bitField0_ = bitField0_; @@ -2448,7 +3044,7 @@ public final class AuthenticationProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse)other); @@ -2457,7 +3053,7 @@ public final class AuthenticationProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse.getDefaultInstance()) return 
this; if (other.hasToken()) { @@ -2466,56 +3062,43 @@ public final class AuthenticationProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token.newBuilder(); - if (hasToken()) { - subBuilder.mergeFrom(getToken()); - } - input.readMessage(subBuilder, extensionRegistry); - setToken(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // optional .Token token = 1; private org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token token_ = org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token.Builder, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenOrBuilder> tokenBuilder_; + /** + * optional .Token token = 1; + */ public boolean hasToken() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional .Token token = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token getToken() { if (tokenBuilder_ == null) { return token_; @@ -2523,6 +3106,9 @@ public final class AuthenticationProtos { return tokenBuilder_.getMessage(); } } + /** + * optional .Token token = 1; + */ public Builder setToken(org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token value) { if (tokenBuilder_ == null) { if (value == null) { @@ -2536,6 +3122,9 @@ public final class AuthenticationProtos { bitField0_ |= 0x00000001; return this; } + /** + * optional .Token token = 1; + */ public Builder setToken( org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token.Builder builderForValue) { if (tokenBuilder_ == null) { @@ -2547,6 +3136,9 @@ public final class AuthenticationProtos { bitField0_ |= 0x00000001; return this; } + /** + * optional .Token token = 1; + */ public Builder mergeToken(org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token value) { if (tokenBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -2563,6 +3155,9 @@ public final class AuthenticationProtos { bitField0_ |= 0x00000001; return this; } + /** 
+ * optional .Token token = 1; + */ public Builder clearToken() { if (tokenBuilder_ == null) { token_ = org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token.getDefaultInstance(); @@ -2573,11 +3168,17 @@ public final class AuthenticationProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * optional .Token token = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token.Builder getTokenBuilder() { bitField0_ |= 0x00000001; onChanged(); return getTokenFieldBuilder().getBuilder(); } + /** + * optional .Token token = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenOrBuilder getTokenOrBuilder() { if (tokenBuilder_ != null) { return tokenBuilder_.getMessageOrBuilder(); @@ -2585,6 +3186,9 @@ public final class AuthenticationProtos { return token_; } } + /** + * optional .Token token = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token.Builder, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenOrBuilder> getTokenFieldBuilder() { @@ -2598,84 +3202,145 @@ public final class AuthenticationProtos { } return tokenBuilder_; } - + // @@protoc_insertion_point(builder_scope:TokenResponse) } - + static { defaultInstance = new TokenResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:TokenResponse) } - + public interface WhoAmIRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code WhoAmIRequest} + */ public static final class WhoAmIRequest extends com.google.protobuf.GeneratedMessage implements WhoAmIRequestOrBuilder { // Use WhoAmIRequest.newBuilder() to construct. 
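The SingleFieldBuilder plumbing above is what backs the ordinary builder calls a caller makes against the token field. A hypothetical usage sketch, not patch code (the demo class and sample byte values are assumptions):

import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token;
import org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse;

public final class TokenResponseDemo {
  public static TokenResponse build() {
    TokenResponse response = TokenResponse.newBuilder()
        .setToken(Token.newBuilder()                        // setToken(Token.Builder) overload
            .setIdentifier(ByteString.copyFromUtf8("ident"))
            .setService(ByteString.copyFromUtf8("host:port")))
        .build();
    // hasToken()/getToken() read back through the same field-builder layer
    return response.hasToken() ? response : null;
  }
}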
- private WhoAmIRequest(Builder builder) { + private WhoAmIRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private WhoAmIRequest(boolean noInit) {} - + private WhoAmIRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final WhoAmIRequest defaultInstance; public static WhoAmIRequest getDefaultInstance() { return defaultInstance; } - + public WhoAmIRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private WhoAmIRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_WhoAmIRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_WhoAmIRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_WhoAmIRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest.class, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public WhoAmIRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new WhoAmIRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; size += 
getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -2685,101 +3350,95 @@ public final class AuthenticationProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest other = (org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest) obj; - + boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - 
Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code WhoAmIRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequestOrBuilder { @@ -2787,18 +3446,21 @@ public final class AuthenticationProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_WhoAmIRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_WhoAmIRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_WhoAmIRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest.class, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -2809,25 +3471,25 @@ public final class AuthenticationProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_WhoAmIRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest.getDefaultInstance(); } - + public 
org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest build() { org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest result = buildPartial(); if (!result.isInitialized()) { @@ -2835,23 +3497,13 @@ public final class AuthenticationProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest result = new org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest(this); onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest)other); @@ -2860,160 +3512,266 @@ public final class AuthenticationProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } + org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - - + // @@protoc_insertion_point(builder_scope:WhoAmIRequest) } - + static { defaultInstance = new WhoAmIRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:WhoAmIRequest) } - + public interface WhoAmIResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // optional string username = 1; + /** + * optional string username = 1; + */ boolean hasUsername(); - String getUsername(); - + /** + * optional string username = 1; + */ + java.lang.String getUsername(); + /** + * optional string username = 1; + */ + com.google.protobuf.ByteString + getUsernameBytes(); + // optional string authMethod = 2; + /** + * optional string 
authMethod = 2; + */ boolean hasAuthMethod(); - String getAuthMethod(); + /** + * optional string authMethod = 2; + */ + java.lang.String getAuthMethod(); + /** + * optional string authMethod = 2; + */ + com.google.protobuf.ByteString + getAuthMethodBytes(); } + /** + * Protobuf type {@code WhoAmIResponse} + */ public static final class WhoAmIResponse extends com.google.protobuf.GeneratedMessage implements WhoAmIResponseOrBuilder { // Use WhoAmIResponse.newBuilder() to construct. - private WhoAmIResponse(Builder builder) { + private WhoAmIResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private WhoAmIResponse(boolean noInit) {} - + private WhoAmIResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final WhoAmIResponse defaultInstance; public static WhoAmIResponse getDefaultInstance() { return defaultInstance; } - + public WhoAmIResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private WhoAmIResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + username_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + authMethod_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_WhoAmIResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_WhoAmIResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_WhoAmIResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse.class, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public WhoAmIResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new WhoAmIResponse(input, extensionRegistry); + } + }; + + 
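The hunk above is the heart of the 2.4-to-2.5 regeneration for each message: a static PARSER (an AbstractParser delegating to the new parsing constructor) that getParserForType() exposes and that every parseFrom() overload now routes through, instead of round-tripping a Builder. A minimal caller-side sketch, assuming the regenerated WhoAmIResponse is on the classpath (field values are illustrative):

import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse;

public class ParserMigrationSketch {
  public static void main(String[] args) throws InvalidProtocolBufferException {
    // Serialize a sample message (values are illustrative).
    byte[] data = WhoAmIResponse.newBuilder()
        .setUsername("alice")
        .setAuthMethod("KERBEROS")
        .build()
        .toByteArray();

    // Pre-2.5 style removed by this patch: round-trip through a Builder.
    WhoAmIResponse viaBuilder =
        WhoAmIResponse.newBuilder().mergeFrom(data).build();

    // 2.5 style introduced by this patch: parse via the static PARSER.
    WhoAmIResponse viaParser = WhoAmIResponse.PARSER.parseFrom(data);

    System.out.println(viaBuilder.equals(viaParser)); // true
  }
}

One behavioral nuance: the old generated parseDelimitedFrom() returned null when mergeDelimitedFrom() found the stream already at EOF, and AbstractParser.parseDelimitedFrom() preserves that contract (null on clean EOF), so existing null checks in callers remain valid.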
@java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // optional string username = 1; public static final int USERNAME_FIELD_NUMBER = 1; private java.lang.Object username_; + /** + * optional string username = 1; + */ public boolean hasUsername() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getUsername() { + /** + * optional string username = 1; + */ + public java.lang.String getUsername() { java.lang.Object ref = username_; - if (ref instanceof String) { - return (String) ref; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { username_ = s; } return s; } } - private com.google.protobuf.ByteString getUsernameBytes() { + /** + * optional string username = 1; + */ + public com.google.protobuf.ByteString + getUsernameBytes() { java.lang.Object ref = username_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); username_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + // optional string authMethod = 2; public static final int AUTHMETHOD_FIELD_NUMBER = 2; private java.lang.Object authMethod_; + /** + * optional string authMethod = 2; + */ public boolean hasAuthMethod() { return ((bitField0_ & 0x00000002) == 0x00000002); } - public String getAuthMethod() { + /** + * optional string authMethod = 2; + */ + public java.lang.String getAuthMethod() { java.lang.Object ref = authMethod_; - if (ref instanceof String) { - return (String) ref; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { authMethod_ = s; } return s; } } - private com.google.protobuf.ByteString getAuthMethodBytes() { + /** + * optional string authMethod = 2; + */ + public com.google.protobuf.ByteString + getAuthMethodBytes() { java.lang.Object ref = authMethod_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); authMethod_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + private void initFields() { username_ = ""; authMethod_ = ""; @@ -3022,11 +3780,11 @@ public final class AuthenticationProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -3038,12 +3796,12 @@ public final class AuthenticationProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { 
size += com.google.protobuf.CodedOutputStream @@ -3057,14 +3815,14 @@ public final class AuthenticationProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -3074,7 +3832,7 @@ public final class AuthenticationProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse other = (org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse) obj; - + boolean result = true; result = result && (hasUsername() == other.hasUsername()); if (hasUsername()) { @@ -3090,9 +3848,13 @@ public final class AuthenticationProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasUsername()) { @@ -3104,89 +3866,79 @@ public final class AuthenticationProtos { hash = (53 * hash) + getAuthMethod().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { 
- return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code WhoAmIResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponseOrBuilder { @@ -3194,18 +3946,21 @@ public final class AuthenticationProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_WhoAmIResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_WhoAmIResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_WhoAmIResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse.class, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -3216,7 +3971,7 @@ public final class AuthenticationProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); username_ = ""; @@ -3225,20 +3980,20 @@ public final class AuthenticationProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - 
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_WhoAmIResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse build() { org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse result = buildPartial(); if (!result.isInitialized()) { @@ -3246,17 +4001,7 @@ public final class AuthenticationProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse result = new org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse(this); int from_bitField0_ = bitField0_; @@ -3273,7 +4018,7 @@ public final class AuthenticationProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse)other); @@ -3282,78 +4027,89 @@ public final class AuthenticationProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse.getDefaultInstance()) return this; if (other.hasUsername()) { - setUsername(other.getUsername()); + bitField0_ |= 0x00000001; + username_ = other.username_; + onChanged(); } if (other.hasAuthMethod()) { - setAuthMethod(other.getAuthMethod()); + bitField0_ |= 0x00000002; + authMethod_ = other.authMethod_; + onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - username_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - authMethod_ = input.readBytes(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse parsedMessage = null; + try { + parsedMessage 
= PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // optional string username = 1; private java.lang.Object username_ = ""; + /** + * optional string username = 1; + */ public boolean hasUsername() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getUsername() { + /** + * optional string username = 1; + */ + public java.lang.String getUsername() { java.lang.Object ref = username_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); username_ = s; return s; } else { - return (String) ref; + return (java.lang.String) ref; + } + } + /** + * optional string username = 1; + */ + public com.google.protobuf.ByteString + getUsernameBytes() { + java.lang.Object ref = username_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + username_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; } } - public Builder setUsername(String value) { + /** + * optional string username = 1; + */ + public Builder setUsername( + java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ -3362,34 +4118,72 @@ public final class AuthenticationProtos { onChanged(); return this; } + /** + * optional string username = 1; + */ public Builder clearUsername() { bitField0_ = (bitField0_ & ~0x00000001); username_ = getDefaultInstance().getUsername(); onChanged(); return this; } - void setUsername(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; + /** + * optional string username = 1; + */ + public Builder setUsernameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; username_ = value; onChanged(); + return this; } - + // optional string authMethod = 2; private java.lang.Object authMethod_ = ""; + /** + * optional string authMethod = 2; + */ public boolean hasAuthMethod() { return ((bitField0_ & 0x00000002) == 0x00000002); } - public String getAuthMethod() { + /** + * optional string authMethod = 2; + */ + public java.lang.String getAuthMethod() { java.lang.Object ref = authMethod_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); authMethod_ = s; return s; } else { - return (String) ref; + return (java.lang.String) ref; + } + } + /** + * optional string authMethod = 2; + */ + public com.google.protobuf.ByteString + getAuthMethodBytes() { + java.lang.Object ref = authMethod_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + authMethod_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; } } - public Builder setAuthMethod(String value) { + /** + * optional string authMethod = 2; + */ + public Builder setAuthMethod( + java.lang.String value) { if (value == null) { throw new 
NullPointerException(); } @@ -3398,46 +4192,70 @@ public final class AuthenticationProtos { onChanged(); return this; } + /** + * optional string authMethod = 2; + */ public Builder clearAuthMethod() { bitField0_ = (bitField0_ & ~0x00000002); authMethod_ = getDefaultInstance().getAuthMethod(); onChanged(); return this; } - void setAuthMethod(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000002; + /** + * optional string authMethod = 2; + */ + public Builder setAuthMethodBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; authMethod_ = value; onChanged(); + return this; } - + // @@protoc_insertion_point(builder_scope:WhoAmIResponse) } - + static { defaultInstance = new WhoAmIResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:WhoAmIResponse) } - + + /** + * Protobuf service {@code AuthenticationService} + * + *
+   * <pre>
+   * RPC service
+   * </pre>
+ */ public static abstract class AuthenticationService implements com.google.protobuf.Service { protected AuthenticationService() {} - + public interface Interface { + /** + * rpc getAuthenticationToken(.TokenRequest) returns (.TokenResponse); + */ public abstract void getAuthenticationToken( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc whoami(.WhoAmIRequest) returns (.WhoAmIResponse); + */ public abstract void whoami( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest request, com.google.protobuf.RpcCallback done); - + } - + public static com.google.protobuf.Service newReflectiveService( final Interface impl) { return new AuthenticationService() { @@ -3448,7 +4266,7 @@ public final class AuthenticationProtos { com.google.protobuf.RpcCallback done) { impl.getAuthenticationToken(controller, request, done); } - + @java.lang.Override public void whoami( com.google.protobuf.RpcController controller, @@ -3456,10 +4274,10 @@ public final class AuthenticationProtos { com.google.protobuf.RpcCallback done) { impl.whoami(controller, request, done); } - + }; } - + public static com.google.protobuf.BlockingService newReflectiveBlockingService(final BlockingInterface impl) { return new com.google.protobuf.BlockingService() { @@ -3467,7 +4285,7 @@ public final class AuthenticationProtos { getDescriptorForType() { return getDescriptor(); } - + public final com.google.protobuf.Message callBlockingMethod( com.google.protobuf.Descriptors.MethodDescriptor method, com.google.protobuf.RpcController controller, @@ -3487,7 +4305,7 @@ public final class AuthenticationProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getRequestPrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -3505,7 +4323,7 @@ public final class AuthenticationProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getResponsePrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -3523,20 +4341,26 @@ public final class AuthenticationProtos { throw new java.lang.AssertionError("Can't get here."); } } - + }; } - + + /** + * rpc getAuthenticationToken(.TokenRequest) returns (.TokenResponse); + */ public abstract void getAuthenticationToken( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc whoami(.WhoAmIRequest) returns (.WhoAmIResponse); + */ public abstract void whoami( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest request, com.google.protobuf.RpcCallback done); - + public static final com.google.protobuf.Descriptors.ServiceDescriptor getDescriptor() { @@ -3546,7 +4370,7 @@ public final class AuthenticationProtos { getDescriptorForType() { return getDescriptor(); } - + public final void callMethod( com.google.protobuf.Descriptors.MethodDescriptor method, com.google.protobuf.RpcController controller, @@ -3573,7 +4397,7 @@ public final class AuthenticationProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getRequestPrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -3591,7 +4415,7 @@ 
public final class AuthenticationProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getResponsePrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -3609,23 +4433,23 @@ public final class AuthenticationProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public static Stub newStub( com.google.protobuf.RpcChannel channel) { return new Stub(channel); } - + public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.AuthenticationService implements Interface { private Stub(com.google.protobuf.RpcChannel channel) { this.channel = channel; } - + private final com.google.protobuf.RpcChannel channel; - + public com.google.protobuf.RpcChannel getChannel() { return channel; } - + public void getAuthenticationToken( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest request, @@ -3640,7 +4464,7 @@ public final class AuthenticationProtos { org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse.class, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse.getDefaultInstance())); } - + public void whoami( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest request, @@ -3656,31 +4480,31 @@ public final class AuthenticationProtos { org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse.getDefaultInstance())); } } - + public static BlockingInterface newBlockingStub( com.google.protobuf.BlockingRpcChannel channel) { return new BlockingStub(channel); } - + public interface BlockingInterface { public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse getAuthenticationToken( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse whoami( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest request) throws com.google.protobuf.ServiceException; } - + private static final class BlockingStub implements BlockingInterface { private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { this.channel = channel; } - + private final com.google.protobuf.BlockingRpcChannel channel; - + public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse getAuthenticationToken( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest request) @@ -3691,8 +4515,8 @@ public final class AuthenticationProtos { request, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse whoami( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest request) @@ -3703,10 +4527,12 @@ public final class AuthenticationProtos { request, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse.getDefaultInstance()); } - + } + + // @@protoc_insertion_point(class_scope:AuthenticationService) } - + private static com.google.protobuf.Descriptors.Descriptor 
internal_static_AuthenticationKey_descriptor; private static @@ -3742,7 +4568,7 @@ public final class AuthenticationProtos { private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_WhoAmIResponse_fieldAccessorTable; - + public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; @@ -3779,57 +4605,43 @@ public final class AuthenticationProtos { internal_static_AuthenticationKey_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_AuthenticationKey_descriptor, - new java.lang.String[] { "Id", "ExpirationDate", "Key", }, - org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.AuthenticationKey.class, - org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.AuthenticationKey.Builder.class); + new java.lang.String[] { "Id", "ExpirationDate", "Key", }); internal_static_TokenIdentifier_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_TokenIdentifier_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_TokenIdentifier_descriptor, - new java.lang.String[] { "Kind", "Username", "KeyId", "IssueDate", "ExpirationDate", "SequenceNumber", }, - org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier.class, - org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier.Builder.class); + new java.lang.String[] { "Kind", "Username", "KeyId", "IssueDate", "ExpirationDate", "SequenceNumber", }); internal_static_Token_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_Token_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_Token_descriptor, - new java.lang.String[] { "Identifier", "Password", "Service", }, - org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token.class, - org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token.Builder.class); + new java.lang.String[] { "Identifier", "Password", "Service", }); internal_static_TokenRequest_descriptor = getDescriptor().getMessageTypes().get(3); internal_static_TokenRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_TokenRequest_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest.class, - org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest.Builder.class); + new java.lang.String[] { }); internal_static_TokenResponse_descriptor = getDescriptor().getMessageTypes().get(4); internal_static_TokenResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_TokenResponse_descriptor, - new java.lang.String[] { "Token", }, - org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse.Builder.class); + new java.lang.String[] { "Token", }); internal_static_WhoAmIRequest_descriptor = getDescriptor().getMessageTypes().get(5); internal_static_WhoAmIRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_WhoAmIRequest_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest.class, - org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest.Builder.class); + new java.lang.String[] { }); 
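These descriptor-table hunks are the flip side of the ensureFieldAccessorsInitialized() change in each internalGetFieldAccessorTable(): the 2.5 FieldAccessorTable constructor takes only the field names, and the message/builder classes are bound lazily on first access. Reflection over the descriptors behaves the same either way; a small sketch, assuming the regenerated AuthenticationProtos is on the classpath (the field value is illustrative):

import com.google.protobuf.Descriptors;
import org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse;

public class DescriptorLookupSketch {
  public static void main(String[] args) {
    // Descriptors are still wired up by the static assigner above.
    Descriptors.Descriptor type = WhoAmIResponse.getDescriptor();
    Descriptors.FieldDescriptor username = type.findFieldByName("username");

    WhoAmIResponse msg = WhoAmIResponse.newBuilder()
        .setUsername("alice") // illustrative value
        .build();

    // Generic field access goes through the lazily initialized accessor table.
    System.out.println(msg.getField(username)); // prints: alice
  }
}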
internal_static_WhoAmIResponse_descriptor = getDescriptor().getMessageTypes().get(6); internal_static_WhoAmIResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_WhoAmIResponse_descriptor, - new java.lang.String[] { "Username", "AuthMethod", }, - org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse.Builder.class); + new java.lang.String[] { "Username", "AuthMethod", }); return null; } }; @@ -3838,6 +4650,6 @@ public final class AuthenticationProtos { new com.google.protobuf.Descriptors.FileDescriptor[] { }, assigner); } - + // @@protoc_insertion_point(outer_class_scope) } diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java index e387020..4e0db03 100644 --- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java +++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java @@ -10,78 +10,191 @@ public final class ClientProtos { } public interface ColumnOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required bytes family = 1; + /** + * required bytes family = 1; + */ boolean hasFamily(); + /** + * required bytes family = 1; + */ com.google.protobuf.ByteString getFamily(); - + // repeated bytes qualifier = 2; + /** + * repeated bytes qualifier = 2; + */ java.util.List getQualifierList(); + /** + * repeated bytes qualifier = 2; + */ int getQualifierCount(); + /** + * repeated bytes qualifier = 2; + */ com.google.protobuf.ByteString getQualifier(int index); } + /** + * Protobuf type {@code Column} + * + *
+   * <pre>
+   **
+   * Container for a list of column qualifier names of a family.
+   * </pre>
+ */ public static final class Column extends com.google.protobuf.GeneratedMessage implements ColumnOrBuilder { // Use Column.newBuilder() to construct. - private Column(Builder builder) { + private Column(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private Column(boolean noInit) {} - + private Column(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final Column defaultInstance; public static Column getDefaultInstance() { return defaultInstance; } - + public Column getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private Column( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + family_ = input.readBytes(); + break; + } + case 18: { + if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + qualifier_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000002; + } + qualifier_.add(input.readBytes()); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + qualifier_ = java.util.Collections.unmodifiableList(qualifier_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Column_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Column_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Column_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public Column parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new Column(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required bytes family = 1; public static final int FAMILY_FIELD_NUMBER = 1; private com.google.protobuf.ByteString 
family_; + /** + * required bytes family = 1; + */ public boolean hasFamily() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes family = 1; + */ public com.google.protobuf.ByteString getFamily() { return family_; } - + // repeated bytes qualifier = 2; public static final int QUALIFIER_FIELD_NUMBER = 2; private java.util.List qualifier_; + /** + * repeated bytes qualifier = 2; + */ public java.util.List getQualifierList() { return qualifier_; } + /** + * repeated bytes qualifier = 2; + */ public int getQualifierCount() { return qualifier_.size(); } + /** + * repeated bytes qualifier = 2; + */ public com.google.protobuf.ByteString getQualifier(int index) { return qualifier_.get(index); } - + private void initFields() { family_ = com.google.protobuf.ByteString.EMPTY; - qualifier_ = java.util.Collections.emptyList();; + qualifier_ = java.util.Collections.emptyList(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasFamily()) { memoizedIsInitialized = 0; return false; @@ -89,7 +202,7 @@ public final class ClientProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -101,12 +214,12 @@ public final class ClientProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -125,14 +238,14 @@ public final class ClientProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -142,7 +255,7 @@ public final class ClientProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column) obj; - + boolean result = true; result = result && (hasFamily() == other.hasFamily()); if (hasFamily()) { @@ -155,9 +268,13 @@ public final class ClientProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasFamily()) { @@ -169,89 +286,84 @@ public final class ClientProtos { hash = (53 * hash) + getQualifierList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, 
extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code Column} + * + *
+     * <pre>
+     **
+     * Container for a list of column qualifier names of a family.
+     * </pre>
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder { @@ -259,18 +371,21 @@ public final class ClientProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Column_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Column_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Column_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -281,29 +396,29 @@ public final class ClientProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); family_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); - qualifier_ = java.util.Collections.emptyList();; + qualifier_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Column_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column build() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column result = buildPartial(); if (!result.isInitialized()) { @@ -311,17 +426,7 @@ public final class ClientProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column(this); int from_bitField0_ = bitField0_; @@ -339,7 +444,7 @@ public final class ClientProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column)other); @@ -348,7 +453,7 @@ public final class ClientProtos { return this; } } - + public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column other) { if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance()) return this; if (other.hasFamily()) { @@ -367,7 +472,7 @@ public final class ClientProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasFamily()) { @@ -375,54 +480,43 @@ public final class ClientProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - family_ = input.readBytes(); - break; - } - case 18: { - ensureQualifierIsMutable(); - qualifier_.add(input.readBytes()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required bytes family = 1; private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes family = 1; + */ public boolean hasFamily() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes family = 1; + */ public com.google.protobuf.ByteString getFamily() { return family_; } + /** + * required bytes family = 1; + */ public Builder setFamily(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -432,31 +526,46 @@ public final class ClientProtos { onChanged(); return this; } + /** + * required bytes family = 1; + */ public Builder clearFamily() { bitField0_ = (bitField0_ & ~0x00000001); family_ = getDefaultInstance().getFamily(); onChanged(); return this; } - + // repeated bytes qualifier = 2; - private java.util.List qualifier_ = java.util.Collections.emptyList();; + private java.util.List qualifier_ = java.util.Collections.emptyList(); private void ensureQualifierIsMutable() { if (!((bitField0_ & 0x00000002) == 0x00000002)) { qualifier_ = new java.util.ArrayList(qualifier_); bitField0_ |= 0x00000002; } } + /** + * repeated bytes qualifier = 2; + */ public java.util.List getQualifierList() { return java.util.Collections.unmodifiableList(qualifier_); } + /** + * repeated bytes qualifier = 2; + */ public int getQualifierCount() { return qualifier_.size(); } + /** + * repeated bytes qualifier = 2; + */ public com.google.protobuf.ByteString getQualifier(int index) { return qualifier_.get(index); } + /** + * repeated bytes qualifier = 2; + */ public Builder setQualifier( int index, com.google.protobuf.ByteString value) { if (value == null) { @@ -467,6 +576,9 @@ public final class ClientProtos { onChanged(); return this; } + /** + * repeated bytes 
qualifier = 2; + */ public Builder addQualifier(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -476,6 +588,9 @@ public final class ClientProtos { onChanged(); return this; } + /** + * repeated bytes qualifier = 2; + */ public Builder addAllQualifier( java.lang.Iterable values) { ensureQualifierIsMutable(); @@ -483,224 +598,523 @@ public final class ClientProtos { onChanged(); return this; } + /** + * repeated bytes qualifier = 2; + */ public Builder clearQualifier() { - qualifier_ = java.util.Collections.emptyList();; + qualifier_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:Column) } - + static { defaultInstance = new Column(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:Column) } - + public interface GetOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required bytes row = 1; + /** + * required bytes row = 1; + */ boolean hasRow(); + /** + * required bytes row = 1; + */ com.google.protobuf.ByteString getRow(); - + // repeated .Column column = 2; + /** + * repeated .Column column = 2; + */ java.util.List getColumnList(); + /** + * repeated .Column column = 2; + */ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index); + /** + * repeated .Column column = 2; + */ int getColumnCount(); + /** + * repeated .Column column = 2; + */ java.util.List getColumnOrBuilderList(); + /** + * repeated .Column column = 2; + */ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder( int index); - + // repeated .NameBytesPair attribute = 3; + /** + * repeated .NameBytesPair attribute = 3; + */ java.util.List getAttributeList(); + /** + * repeated .NameBytesPair attribute = 3; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index); + /** + * repeated .NameBytesPair attribute = 3; + */ int getAttributeCount(); + /** + * repeated .NameBytesPair attribute = 3; + */ java.util.List getAttributeOrBuilderList(); + /** + * repeated .NameBytesPair attribute = 3; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( int index); - + // optional .Filter filter = 4; + /** + * optional .Filter filter = 4; + */ boolean hasFilter(); + /** + * optional .Filter filter = 4; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilter(); + /** + * optional .Filter filter = 4; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFilterOrBuilder(); - + // optional .TimeRange timeRange = 5; + /** + * optional .TimeRange timeRange = 5; + */ boolean hasTimeRange(); + /** + * optional .TimeRange timeRange = 5; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange(); + /** + * optional .TimeRange timeRange = 5; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder(); - + // optional uint32 maxVersions = 6 [default = 1]; + /** + * optional uint32 maxVersions = 6 [default = 1]; + */ boolean hasMaxVersions(); + /** + * optional uint32 maxVersions = 6 [default = 1]; + */ int getMaxVersions(); - + // optional bool cacheBlocks = 7 [default = true]; + /** + * optional bool cacheBlocks = 7 [default = true]; + */ boolean hasCacheBlocks(); + /** + * optional bool cacheBlocks = 7 [default = true]; + */ boolean getCacheBlocks(); - + // optional uint32 storeLimit = 8; + 
/** + * optional uint32 storeLimit = 8; + */ boolean hasStoreLimit(); + /** + * optional uint32 storeLimit = 8; + */ int getStoreLimit(); - + // optional uint32 storeOffset = 9; + /** + * optional uint32 storeOffset = 9; + */ boolean hasStoreOffset(); + /** + * optional uint32 storeOffset = 9; + */ int getStoreOffset(); } + /** + * Protobuf type {@code Get} + * + *
+   **
+   * The protocol buffer version of Get
+   * 
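+   *
+   * A usage sketch (illustration only; "data" is a hypothetical serialized
+   * Get). The generated parseFrom overloads now delegate to the static
+   * PARSER rather than round-tripping through a Builder:
+   *
+   *   byte[] data = ...;                                      // hypothetical input
+   *   ClientProtos.Get g = ClientProtos.Get.parseFrom(data);  // delegates to PARSER
+   *   ClientProtos.Get g2 = ClientProtos.Get.PARSER.parseFrom(data);  // equivalent
+   *   // both throw InvalidProtocolBufferException on malformed input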
+ */ public static final class Get extends com.google.protobuf.GeneratedMessage implements GetOrBuilder { // Use Get.newBuilder() to construct. - private Get(Builder builder) { + private Get(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private Get(boolean noInit) {} - + private Get(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final Get defaultInstance; public static Get getDefaultInstance() { return defaultInstance; } - + public Get getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private Get( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + row_ = input.readBytes(); + break; + } + case 18: { + if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + column_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000002; + } + column_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.PARSER, extensionRegistry)); + break; + } + case 26: { + if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { + attribute_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000004; + } + attribute_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry)); + break; + } + case 34: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder subBuilder = null; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + subBuilder = filter_.toBuilder(); + } + filter_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(filter_); + filter_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000002; + break; + } + case 42: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder subBuilder = null; + if (((bitField0_ & 0x00000004) == 0x00000004)) { + subBuilder = timeRange_.toBuilder(); + } + timeRange_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(timeRange_); + timeRange_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000004; + break; + } + case 48: { + bitField0_ |= 0x00000008; + maxVersions_ = input.readUInt32(); + break; + } + case 56: { + bitField0_ |= 0x00000010; + cacheBlocks_ = input.readBool(); + break; + } + case 64: { + bitField0_ |= 0x00000020; + storeLimit_ = input.readUInt32(); + break; + } + case 72: { + bitField0_ |= 0x00000040; + storeOffset_ = input.readUInt32(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch 
(java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + column_ = java.util.Collections.unmodifiableList(column_); + } + if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { + attribute_ = java.util.Collections.unmodifiableList(attribute_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Get_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Get_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Get_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public Get parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new Get(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required bytes row = 1; public static final int ROW_FIELD_NUMBER = 1; private com.google.protobuf.ByteString row_; + /** + * required bytes row = 1; + */ public boolean hasRow() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes row = 1; + */ public com.google.protobuf.ByteString getRow() { return row_; } - + // repeated .Column column = 2; public static final int COLUMN_FIELD_NUMBER = 2; private java.util.List column_; + /** + * repeated .Column column = 2; + */ public java.util.List getColumnList() { return column_; } + /** + * repeated .Column column = 2; + */ public java.util.List getColumnOrBuilderList() { return column_; } + /** + * repeated .Column column = 2; + */ public int getColumnCount() { return column_.size(); } + /** + * repeated .Column column = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index) { return column_.get(index); } + /** + * repeated .Column column = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder( int index) { return column_.get(index); } - + // repeated .NameBytesPair attribute = 3; public static final int ATTRIBUTE_FIELD_NUMBER = 3; private java.util.List attribute_; + /** + * repeated .NameBytesPair attribute = 3; + */ public java.util.List getAttributeList() { return attribute_; } + /** + * repeated .NameBytesPair attribute = 3; + */ public java.util.List getAttributeOrBuilderList() { return attribute_; } + /** + * repeated .NameBytesPair attribute = 3; + */ public int getAttributeCount() { return attribute_.size(); } + /** + * repeated .NameBytesPair attribute = 3; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) { return attribute_.get(index); } + /** + * repeated .NameBytesPair attribute = 3; + */ public 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( int index) { return attribute_.get(index); } - + // optional .Filter filter = 4; public static final int FILTER_FIELD_NUMBER = 4; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter filter_; + /** + * optional .Filter filter = 4; + */ public boolean hasFilter() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional .Filter filter = 4; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilter() { return filter_; } + /** + * optional .Filter filter = 4; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFilterOrBuilder() { return filter_; } - + // optional .TimeRange timeRange = 5; public static final int TIMERANGE_FIELD_NUMBER = 5; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_; + /** + * optional .TimeRange timeRange = 5; + */ public boolean hasTimeRange() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional .TimeRange timeRange = 5; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { return timeRange_; } + /** + * optional .TimeRange timeRange = 5; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { return timeRange_; } - + // optional uint32 maxVersions = 6 [default = 1]; public static final int MAXVERSIONS_FIELD_NUMBER = 6; private int maxVersions_; + /** + * optional uint32 maxVersions = 6 [default = 1]; + */ public boolean hasMaxVersions() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * optional uint32 maxVersions = 6 [default = 1]; + */ public int getMaxVersions() { return maxVersions_; } - + // optional bool cacheBlocks = 7 [default = true]; public static final int CACHEBLOCKS_FIELD_NUMBER = 7; private boolean cacheBlocks_; + /** + * optional bool cacheBlocks = 7 [default = true]; + */ public boolean hasCacheBlocks() { return ((bitField0_ & 0x00000010) == 0x00000010); } + /** + * optional bool cacheBlocks = 7 [default = true]; + */ public boolean getCacheBlocks() { return cacheBlocks_; } - + // optional uint32 storeLimit = 8; public static final int STORELIMIT_FIELD_NUMBER = 8; private int storeLimit_; + /** + * optional uint32 storeLimit = 8; + */ public boolean hasStoreLimit() { return ((bitField0_ & 0x00000020) == 0x00000020); } + /** + * optional uint32 storeLimit = 8; + */ public int getStoreLimit() { return storeLimit_; } - + // optional uint32 storeOffset = 9; public static final int STOREOFFSET_FIELD_NUMBER = 9; private int storeOffset_; + /** + * optional uint32 storeOffset = 9; + */ public boolean hasStoreOffset() { return ((bitField0_ & 0x00000040) == 0x00000040); } + /** + * optional uint32 storeOffset = 9; + */ public int getStoreOffset() { return storeOffset_; } - + private void initFields() { row_ = com.google.protobuf.ByteString.EMPTY; column_ = java.util.Collections.emptyList(); @@ -716,7 +1130,7 @@ public final class ClientProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasRow()) { memoizedIsInitialized = 0; return false; @@ -742,7 +1156,7 @@ public final class ClientProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -775,12 +1189,12 @@ public final class ClientProtos { } 
getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -822,14 +1236,14 @@ public final class ClientProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -839,7 +1253,7 @@ public final class ClientProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get) obj; - + boolean result = true; result = result && (hasRow() == other.hasRow()); if (hasRow()) { @@ -884,9 +1298,13 @@ public final class ClientProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasRow()) { @@ -926,89 +1344,84 @@ public final class ClientProtos { hash = (53 * hash) + getStoreOffset(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if 
(builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code Get} + * + *
+     **
+     * The protocol buffer version of Get
+     * 
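+     *
+     * A usage sketch (illustration only; "row1" is a hypothetical row key).
+     * The required "row" field must be set, or build() throws an
+     * UninitializedMessageException:
+     *
+     *   ClientProtos.Get get = ClientProtos.Get.newBuilder()
+     *       .setRow(com.google.protobuf.ByteString.copyFromUtf8("row1"))
+     *       .setMaxVersions(3)
+     *       .build();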
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder { @@ -1016,18 +1429,21 @@ public final class ClientProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Get_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Get_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Get_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -1042,7 +1458,7 @@ public final class ClientProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); row_ = com.google.protobuf.ByteString.EMPTY; @@ -1081,20 +1497,20 @@ public final class ClientProtos { bitField0_ = (bitField0_ & ~0x00000100); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Get_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get build() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get result = buildPartial(); if (!result.isInitialized()) { @@ -1102,17 +1518,7 @@ public final class ClientProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get(this); int from_bitField0_ = bitField0_; @@ -1175,7 +1581,7 @@ public final class ClientProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get)other); @@ -1184,7 +1590,7 @@ public final class ClientProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get other) { if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance()) return this; if 
(other.hasRow()) { @@ -1263,7 +1669,7 @@ public final class ClientProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasRow()) { @@ -1289,99 +1695,43 @@ public final class ClientProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - row_ = input.readBytes(); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addColumn(subBuilder.buildPartial()); - break; - } - case 26: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addAttribute(subBuilder.buildPartial()); - break; - } - case 34: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.newBuilder(); - if (hasFilter()) { - subBuilder.mergeFrom(getFilter()); - } - input.readMessage(subBuilder, extensionRegistry); - setFilter(subBuilder.buildPartial()); - break; - } - case 42: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(); - if (hasTimeRange()) { - subBuilder.mergeFrom(getTimeRange()); - } - input.readMessage(subBuilder, extensionRegistry); - setTimeRange(subBuilder.buildPartial()); - break; - } - case 48: { - bitField0_ |= 0x00000020; - maxVersions_ = input.readUInt32(); - break; - } - case 56: { - bitField0_ |= 0x00000040; - cacheBlocks_ = input.readBool(); - break; - } - case 64: { - bitField0_ |= 0x00000080; - storeLimit_ = input.readUInt32(); - break; - } - case 72: { - bitField0_ |= 0x00000100; - storeOffset_ = input.readUInt32(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required bytes row = 1; private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes row = 1; + */ public boolean hasRow() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes row = 1; + */ public com.google.protobuf.ByteString getRow() { return row_; } + /** + * required bytes row = 1; + */ public Builder setRow(com.google.protobuf.ByteString value) { if (value == null) { 
throw new NullPointerException(); @@ -1391,13 +1741,16 @@ public final class ClientProtos { onChanged(); return this; } + /** + * required bytes row = 1; + */ public Builder clearRow() { bitField0_ = (bitField0_ & ~0x00000001); row_ = getDefaultInstance().getRow(); onChanged(); return this; } - + // repeated .Column column = 2; private java.util.List column_ = java.util.Collections.emptyList(); @@ -1407,10 +1760,13 @@ public final class ClientProtos { bitField0_ |= 0x00000002; } } - + private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> columnBuilder_; - + + /** + * repeated .Column column = 2; + */ public java.util.List getColumnList() { if (columnBuilder_ == null) { return java.util.Collections.unmodifiableList(column_); @@ -1418,6 +1774,9 @@ public final class ClientProtos { return columnBuilder_.getMessageList(); } } + /** + * repeated .Column column = 2; + */ public int getColumnCount() { if (columnBuilder_ == null) { return column_.size(); @@ -1425,6 +1784,9 @@ public final class ClientProtos { return columnBuilder_.getCount(); } } + /** + * repeated .Column column = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index) { if (columnBuilder_ == null) { return column_.get(index); @@ -1432,6 +1794,9 @@ public final class ClientProtos { return columnBuilder_.getMessage(index); } } + /** + * repeated .Column column = 2; + */ public Builder setColumn( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) { if (columnBuilder_ == null) { @@ -1446,6 +1811,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .Column column = 2; + */ public Builder setColumn( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) { if (columnBuilder_ == null) { @@ -1457,6 +1825,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .Column column = 2; + */ public Builder addColumn(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) { if (columnBuilder_ == null) { if (value == null) { @@ -1470,6 +1841,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .Column column = 2; + */ public Builder addColumn( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) { if (columnBuilder_ == null) { @@ -1484,6 +1858,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .Column column = 2; + */ public Builder addColumn( org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) { if (columnBuilder_ == null) { @@ -1495,6 +1872,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .Column column = 2; + */ public Builder addColumn( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) { if (columnBuilder_ == null) { @@ -1506,6 +1886,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .Column column = 2; + */ public Builder addAllColumn( java.lang.Iterable values) { if (columnBuilder_ == null) { @@ -1517,6 +1900,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .Column column = 2; + */ public Builder clearColumn() { if (columnBuilder_ == null) { column_ = java.util.Collections.emptyList(); @@ -1527,6 +1913,9 @@ public final class ClientProtos { 
} return this; } + /** + * repeated .Column column = 2; + */ public Builder removeColumn(int index) { if (columnBuilder_ == null) { ensureColumnIsMutable(); @@ -1537,10 +1926,16 @@ public final class ClientProtos { } return this; } + /** + * repeated .Column column = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder getColumnBuilder( int index) { return getColumnFieldBuilder().getBuilder(index); } + /** + * repeated .Column column = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder( int index) { if (columnBuilder_ == null) { @@ -1548,6 +1943,9 @@ public final class ClientProtos { return columnBuilder_.getMessageOrBuilder(index); } } + /** + * repeated .Column column = 2; + */ public java.util.List getColumnOrBuilderList() { if (columnBuilder_ != null) { @@ -1556,15 +1954,24 @@ public final class ClientProtos { return java.util.Collections.unmodifiableList(column_); } } + /** + * repeated .Column column = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder addColumnBuilder() { return getColumnFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance()); } + /** + * repeated .Column column = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder addColumnBuilder( int index) { return getColumnFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance()); } + /** + * repeated .Column column = 2; + */ public java.util.List getColumnBuilderList() { return getColumnFieldBuilder().getBuilderList(); @@ -1583,7 +1990,7 @@ public final class ClientProtos { } return columnBuilder_; } - + // repeated .NameBytesPair attribute = 3; private java.util.List attribute_ = java.util.Collections.emptyList(); @@ -1593,10 +2000,13 @@ public final class ClientProtos { bitField0_ |= 0x00000004; } } - + private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> attributeBuilder_; - + + /** + * repeated .NameBytesPair attribute = 3; + */ public java.util.List getAttributeList() { if (attributeBuilder_ == null) { return java.util.Collections.unmodifiableList(attribute_); @@ -1604,6 +2014,9 @@ public final class ClientProtos { return attributeBuilder_.getMessageList(); } } + /** + * repeated .NameBytesPair attribute = 3; + */ public int getAttributeCount() { if (attributeBuilder_ == null) { return attribute_.size(); @@ -1611,6 +2024,9 @@ public final class ClientProtos { return attributeBuilder_.getCount(); } } + /** + * repeated .NameBytesPair attribute = 3; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) { if (attributeBuilder_ == null) { return attribute_.get(index); @@ -1618,6 +2034,9 @@ public final class ClientProtos { return attributeBuilder_.getMessage(index); } } + /** + * repeated .NameBytesPair attribute = 3; + */ public Builder setAttribute( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { if (attributeBuilder_ == null) { @@ -1632,6 +2051,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .NameBytesPair attribute = 3; + */ public Builder setAttribute( int index, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { if (attributeBuilder_ == null) { @@ -1643,6 +2065,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .NameBytesPair attribute = 3; + */ public Builder addAttribute(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { if (attributeBuilder_ == null) { if (value == null) { @@ -1656,6 +2081,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .NameBytesPair attribute = 3; + */ public Builder addAttribute( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { if (attributeBuilder_ == null) { @@ -1670,6 +2098,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .NameBytesPair attribute = 3; + */ public Builder addAttribute( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { if (attributeBuilder_ == null) { @@ -1681,6 +2112,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .NameBytesPair attribute = 3; + */ public Builder addAttribute( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { if (attributeBuilder_ == null) { @@ -1692,6 +2126,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .NameBytesPair attribute = 3; + */ public Builder addAllAttribute( java.lang.Iterable values) { if (attributeBuilder_ == null) { @@ -1703,6 +2140,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .NameBytesPair attribute = 3; + */ public Builder clearAttribute() { if (attributeBuilder_ == null) { attribute_ = java.util.Collections.emptyList(); @@ -1713,6 +2153,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .NameBytesPair attribute = 3; + */ public Builder removeAttribute(int index) { if (attributeBuilder_ == null) { ensureAttributeIsMutable(); @@ -1723,10 +2166,16 @@ public final class ClientProtos { } return this; } + /** + * repeated .NameBytesPair attribute = 3; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getAttributeBuilder( int index) { return getAttributeFieldBuilder().getBuilder(index); } + /** + * repeated .NameBytesPair attribute = 3; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( int index) { if (attributeBuilder_ == null) { @@ -1734,6 +2183,9 @@ public final class ClientProtos { return attributeBuilder_.getMessageOrBuilder(index); } } + /** + * repeated .NameBytesPair attribute = 3; + */ public java.util.List getAttributeOrBuilderList() { if (attributeBuilder_ != null) { @@ -1742,15 +2194,24 @@ public final class ClientProtos { return java.util.Collections.unmodifiableList(attribute_); } } + /** + * repeated .NameBytesPair attribute = 3; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder() { return getAttributeFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); } + /** + * repeated .NameBytesPair attribute = 3; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder( int index) { return getAttributeFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); } + /** + * repeated .NameBytesPair attribute = 3; + */ public java.util.List 
getAttributeBuilderList() { return getAttributeFieldBuilder().getBuilderList(); @@ -1769,14 +2230,20 @@ public final class ClientProtos { } return attributeBuilder_; } - + // optional .Filter filter = 4; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder> filterBuilder_; + /** + * optional .Filter filter = 4; + */ public boolean hasFilter() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * optional .Filter filter = 4; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilter() { if (filterBuilder_ == null) { return filter_; @@ -1784,6 +2251,9 @@ public final class ClientProtos { return filterBuilder_.getMessage(); } } + /** + * optional .Filter filter = 4; + */ public Builder setFilter(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter value) { if (filterBuilder_ == null) { if (value == null) { @@ -1797,6 +2267,9 @@ public final class ClientProtos { bitField0_ |= 0x00000008; return this; } + /** + * optional .Filter filter = 4; + */ public Builder setFilter( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder builderForValue) { if (filterBuilder_ == null) { @@ -1808,6 +2281,9 @@ public final class ClientProtos { bitField0_ |= 0x00000008; return this; } + /** + * optional .Filter filter = 4; + */ public Builder mergeFilter(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter value) { if (filterBuilder_ == null) { if (((bitField0_ & 0x00000008) == 0x00000008) && @@ -1824,6 +2300,9 @@ public final class ClientProtos { bitField0_ |= 0x00000008; return this; } + /** + * optional .Filter filter = 4; + */ public Builder clearFilter() { if (filterBuilder_ == null) { filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); @@ -1834,11 +2313,17 @@ public final class ClientProtos { bitField0_ = (bitField0_ & ~0x00000008); return this; } + /** + * optional .Filter filter = 4; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder getFilterBuilder() { bitField0_ |= 0x00000008; onChanged(); return getFilterFieldBuilder().getBuilder(); } + /** + * optional .Filter filter = 4; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFilterOrBuilder() { if (filterBuilder_ != null) { return filterBuilder_.getMessageOrBuilder(); @@ -1846,6 +2331,9 @@ public final class ClientProtos { return filter_; } } + /** + * optional .Filter filter = 4; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder> getFilterFieldBuilder() { @@ -1859,14 +2347,20 @@ public final class ClientProtos { } return filterBuilder_; } - + // optional .TimeRange timeRange = 5; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> timeRangeBuilder_; + /** + * optional .TimeRange timeRange = 5; + */ public boolean hasTimeRange() { return ((bitField0_ & 0x00000010) == 0x00000010); } + /** + * optional .TimeRange timeRange = 5; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { if (timeRangeBuilder_ == null) { return timeRange_; @@ -1874,6 +2368,9 @@ public final class ClientProtos { return timeRangeBuilder_.getMessage(); } } + /** + * optional .TimeRange timeRange = 5; + */ public Builder setTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) { if (timeRangeBuilder_ == null) { if (value == null) { @@ -1887,6 +2384,9 @@ public final class ClientProtos { bitField0_ |= 0x00000010; return this; } + /** + * optional .TimeRange timeRange = 5; + */ public Builder setTimeRange( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder builderForValue) { if (timeRangeBuilder_ == null) { @@ -1898,6 +2398,9 @@ public final class ClientProtos { bitField0_ |= 0x00000010; return this; } + /** + * optional .TimeRange timeRange = 5; + */ public Builder mergeTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) { if (timeRangeBuilder_ == null) { if (((bitField0_ & 0x00000010) == 0x00000010) && @@ -1914,6 +2417,9 @@ public final class ClientProtos { bitField0_ |= 0x00000010; return this; } + /** + * optional .TimeRange timeRange = 5; + */ public Builder clearTimeRange() { if (timeRangeBuilder_ == null) { timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); @@ -1924,11 +2430,17 @@ public final class ClientProtos { bitField0_ = (bitField0_ & ~0x00000010); return this; } + /** + * optional .TimeRange timeRange = 5; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder getTimeRangeBuilder() { bitField0_ |= 0x00000010; onChanged(); return getTimeRangeFieldBuilder().getBuilder(); } + /** + * optional .TimeRange timeRange = 5; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { if (timeRangeBuilder_ != null) { return timeRangeBuilder_.getMessageOrBuilder(); @@ -1936,6 +2448,9 @@ public final class ClientProtos { return timeRange_; } } + /** + * optional .TimeRange timeRange = 5; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> getTimeRangeFieldBuilder() { @@ -1949,179 +2464,433 @@ public final class ClientProtos { } return timeRangeBuilder_; } - + // optional uint32 maxVersions = 6 [default = 1]; private int maxVersions_ = 1; + /** + * optional uint32 maxVersions = 6 [default = 1]; + */ public boolean hasMaxVersions() { return ((bitField0_ & 0x00000020) == 0x00000020); } + /** + * optional uint32 maxVersions = 6 [default = 1]; + */ public int getMaxVersions() { return maxVersions_; } + /** + * optional uint32 maxVersions = 6 [default = 1]; + */ public Builder setMaxVersions(int value) { bitField0_ |= 0x00000020; maxVersions_ = value; onChanged(); return this; } + /** + * optional uint32 maxVersions = 6 [default = 1]; + */ public Builder clearMaxVersions() { bitField0_ = (bitField0_ & ~0x00000020); maxVersions_ = 1; onChanged(); 
return this; } - + // optional bool cacheBlocks = 7 [default = true]; private boolean cacheBlocks_ = true; + /** + * optional bool cacheBlocks = 7 [default = true]; + */ public boolean hasCacheBlocks() { return ((bitField0_ & 0x00000040) == 0x00000040); } + /** + * optional bool cacheBlocks = 7 [default = true]; + */ public boolean getCacheBlocks() { return cacheBlocks_; } + /** + * optional bool cacheBlocks = 7 [default = true]; + */ public Builder setCacheBlocks(boolean value) { bitField0_ |= 0x00000040; cacheBlocks_ = value; onChanged(); return this; } + /** + * optional bool cacheBlocks = 7 [default = true]; + */ public Builder clearCacheBlocks() { bitField0_ = (bitField0_ & ~0x00000040); cacheBlocks_ = true; onChanged(); return this; } - + // optional uint32 storeLimit = 8; private int storeLimit_ ; + /** + * optional uint32 storeLimit = 8; + */ public boolean hasStoreLimit() { return ((bitField0_ & 0x00000080) == 0x00000080); } + /** + * optional uint32 storeLimit = 8; + */ public int getStoreLimit() { return storeLimit_; } + /** + * optional uint32 storeLimit = 8; + */ public Builder setStoreLimit(int value) { bitField0_ |= 0x00000080; storeLimit_ = value; onChanged(); return this; } + /** + * optional uint32 storeLimit = 8; + */ public Builder clearStoreLimit() { bitField0_ = (bitField0_ & ~0x00000080); storeLimit_ = 0; onChanged(); return this; } - + // optional uint32 storeOffset = 9; private int storeOffset_ ; + /** + * optional uint32 storeOffset = 9; + */ public boolean hasStoreOffset() { return ((bitField0_ & 0x00000100) == 0x00000100); } + /** + * optional uint32 storeOffset = 9; + */ public int getStoreOffset() { return storeOffset_; } + /** + * optional uint32 storeOffset = 9; + */ public Builder setStoreOffset(int value) { bitField0_ |= 0x00000100; storeOffset_ = value; onChanged(); return this; } + /** + * optional uint32 storeOffset = 9; + */ public Builder clearStoreOffset() { bitField0_ = (bitField0_ & ~0x00000100); storeOffset_ = 0; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:Get) } - + static { defaultInstance = new Get(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:Get) } - + public interface ResultOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // repeated .Cell cell = 1; + /** + * repeated .Cell cell = 1; + * + *
+     * Result includes the Cells or else it just has a count of Cells
+     * that are carried otherwise.
+     * 
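+     *
+     * A usage sketch (illustration only; "result" is a hypothetical parsed
+     * Result carrying its Cells inline):
+     *
+     *   for (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell c
+     *       : result.getCellList()) {
+     *     // each element is a fully materialized pb Cell
+     *   }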
+ */ java.util.List getCellList(); + /** + * repeated .Cell cell = 1; + * + *
+     * Result includes the Cells or else it just has a count of Cells
+     * that are carried otherwise.
+     * 
+ */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell getCell(int index); + /** + * repeated .Cell cell = 1; + * + *
+     * Result includes the Cells or else it just has a count of Cells
+     * that are carried otherwise.
+     * 
+ */ int getCellCount(); + /** + * repeated .Cell cell = 1; + * + *
+     * Result includes the Cells or else it just has a count of Cells
+     * that are carried otherwise.
+     * 
+ */ java.util.List getCellOrBuilderList(); + /** + * repeated .Cell cell = 1; + * + *
+     * Result includes the Cells or else it just has a count of Cells
+     * that are carried otherwise.
+     * 
+ */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CellOrBuilder getCellOrBuilder( int index); - + // optional int32 associatedCellCount = 2; + /** + * optional int32 associatedCellCount = 2; + * + *
+     * The below count is set when the associated cells are
+     * not part of this protobuf message; they are passed alongside
+     * and then this Message is just a placeholder with metadata.
+     * The count is needed to know how many to peel off the block of Cells as
+     * ours.  NOTE: This is different from the pb-managed cellCount of the
+     * 'cell' field above, which is non-null when the cells are pb'd.
+     * 
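+     *
+     * A usage sketch (illustration only; "result" is a hypothetical parsed
+     * Result). Readers use this count to decide whether the Cells are inline
+     * or carried alongside the message:
+     *
+     *   int cellsToRead = result.hasAssociatedCellCount()
+     *       ? result.getAssociatedCellCount()  // cells travel outside the pb
+     *       : result.getCellCount();           // cells are inline in 'cell'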
+ */ boolean hasAssociatedCellCount(); + /** + * optional int32 associatedCellCount = 2; + * + *
+     * The below count is set when the associated cells are
+     * not part of this protobuf message; they are passed alongside
+     * and then this Message is just a placeholder with metadata.
+     * The count is needed to know how many to peel off the block of Cells as
+     * ours.  NOTE: This is different from the pb-managed cellCount of the
+     * 'cell' field above, which is non-null when the cells are pb'd.
+     * 
+ */ int getAssociatedCellCount(); } + /** + * Protobuf type {@code Result} + */ public static final class Result extends com.google.protobuf.GeneratedMessage implements ResultOrBuilder { // Use Result.newBuilder() to construct. - private Result(Builder builder) { + private Result(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private Result(boolean noInit) {} - + private Result(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final Result defaultInstance; public static Result getDefaultInstance() { return defaultInstance; } - + public Result getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private Result( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + cell_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + cell_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell.PARSER, extensionRegistry)); + break; + } + case 16: { + bitField0_ |= 0x00000001; + associatedCellCount_ = input.readInt32(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + cell_ = java.util.Collections.unmodifiableList(cell_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Result_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Result_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Result_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public Result parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new Result(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return 
PARSER; } - + private int bitField0_; // repeated .Cell cell = 1; public static final int CELL_FIELD_NUMBER = 1; private java.util.List cell_; + /** + * repeated .Cell cell = 1; + * + *
+     * Result includes the Cells or else it just has a count of Cells
+     * that are carried otherwise.
+     * 
+ */ public java.util.List getCellList() { return cell_; } + /** + * repeated .Cell cell = 1; + * + *
+     * Result includes the Cells or else it just has a count of Cells
+     * that are carried otherwise.
+     * 
+ */ public java.util.List getCellOrBuilderList() { return cell_; } + /** + * repeated .Cell cell = 1; + * + *
+     * Result includes the Cells or else it just has a count of Cells
+     * that are carried otherwise.
+     * 
+ */ public int getCellCount() { return cell_.size(); } + /** + * repeated .Cell cell = 1; + * + *
+     * Result includes the Cells or else it just has a count of Cells
+     * that are carried otherwise.
+     * 
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell getCell(int index) { return cell_.get(index); } + /** + * repeated .Cell cell = 1; + * + *
+     * Result includes the Cells or else it just has a count of Cells
+     * that are carried otherwise.
+     * 
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CellOrBuilder getCellOrBuilder( int index) { return cell_.get(index); } - + // optional int32 associatedCellCount = 2; public static final int ASSOCIATEDCELLCOUNT_FIELD_NUMBER = 2; private int associatedCellCount_; + /** + * optional int32 associatedCellCount = 2; + * + *
+     * The below count is set when the associated cells are
+     * not part of this protobuf message; they are passed alongside
+     * and then this Message is just a placeholder with metadata.
+     * The count is needed to know how many to peel off the block of Cells as
+     * ours.  NOTE: This is different from the pb-managed cellCount of the
+     * 'cell' field above, which is non-null when the cells are pb'd.
+     * 
+ */ public boolean hasAssociatedCellCount() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional int32 associatedCellCount = 2; + * + *
+     * The below count is set when the associated cells are
+     * not part of this protobuf message; they are passed alongside
+     * and then this Message is just a placeholder with metadata.
+     * The count is needed to know how many to peel off the block of Cells as
+     * ours.  NOTE: This is different from the pb-managed cellCount of the
+     * 'cell' field above, which is non-null when the cells are pb'd.
+     * 
+ */ public int getAssociatedCellCount() { return associatedCellCount_; } - + private void initFields() { cell_ = java.util.Collections.emptyList(); associatedCellCount_ = 0; @@ -2130,11 +2899,11 @@ public final class ClientProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -2146,12 +2915,12 @@ public final class ClientProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; for (int i = 0; i < cell_.size(); i++) { size += com.google.protobuf.CodedOutputStream @@ -2165,14 +2934,14 @@ public final class ClientProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -2182,7 +2951,7 @@ public final class ClientProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result) obj; - + boolean result = true; result = result && getCellList() .equals(other.getCellList()); @@ -2195,9 +2964,13 @@ public final class ClientProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (getCellCount() > 0) { @@ -2209,89 +2982,79 @@ public final class ClientProtos { hash = (53 * hash) + getAssociatedCellCount(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom(java.io.InputStream input) throws java.io.IOException { - return 
newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code Result} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder { @@ -2299,18 +3062,21 @@ public final class ClientProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Result_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Result_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Result_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ 
-2322,7 +3088,7 @@ public final class ClientProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (cellBuilder_ == null) { @@ -2335,20 +3101,20 @@ public final class ClientProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Result_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result build() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result = buildPartial(); if (!result.isInitialized()) { @@ -2356,17 +3122,7 @@ public final class ClientProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result(this); int from_bitField0_ = bitField0_; @@ -2388,7 +3144,7 @@ public final class ClientProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result)other); @@ -2397,7 +3153,7 @@ public final class ClientProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result other) { if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance()) return this; if (cellBuilder_ == null) { @@ -2432,51 +3188,30 @@ public final class ClientProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addCell(subBuilder.buildPartial()); - break; - } - case 16: { - bitField0_ |= 0x00000002; - 
associatedCellCount_ = input.readInt32(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // repeated .Cell cell = 1; private java.util.List cell_ = java.util.Collections.emptyList(); @@ -2486,10 +3221,18 @@ public final class ClientProtos { bitField0_ |= 0x00000001; } } - + private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CellOrBuilder> cellBuilder_; - + + /** + * repeated .Cell cell = 1; + * + *
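A note on the pattern above: every static parseFrom and parseDelimitedFrom variant now delegates to the shared, stateless PARSER singleton instead of allocating a throwaway Builder and calling the removed buildParsed(). The added memoizedHashCode is the usual benign-race cache: two threads may both compute the hash, but an int write is atomic, so readers only ever observe 0 or the final value. A minimal caller-side sketch; the class name is illustrative and not part of the patch:

    import com.google.protobuf.InvalidProtocolBufferException;
    import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;

    public class ResultParseSketch {
      // Same public entry point, new plumbing: parseFrom now routes through
      // the shared Result.PARSER instead of a one-off Builder.
      public static ClientProtos.Result parse(byte[] bytes)
          throws InvalidProtocolBufferException {
        return ClientProtos.Result.parseFrom(bytes);
      }
    }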
+       * Result includes the Cells or else it just has a count of Cells
+       * that are carried otherwise.
+       * </pre>
+ */ public java.util.List getCellList() { if (cellBuilder_ == null) { return java.util.Collections.unmodifiableList(cell_); @@ -2497,6 +3240,14 @@ public final class ClientProtos { return cellBuilder_.getMessageList(); } } + /** + * repeated .Cell cell = 1; + * + *
+       * Result includes the Cells or else it just has a count of Cells
+       * that are carried otherwise.
+       * </pre>
+ */ public int getCellCount() { if (cellBuilder_ == null) { return cell_.size(); @@ -2504,6 +3255,14 @@ public final class ClientProtos { return cellBuilder_.getCount(); } } + /** + * repeated .Cell cell = 1; + * + *
+       * Result includes the Cells or else it just has a count of Cells
+       * that are carried otherwise.
+       * </pre>
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell getCell(int index) { if (cellBuilder_ == null) { return cell_.get(index); @@ -2511,6 +3270,14 @@ public final class ClientProtos { return cellBuilder_.getMessage(index); } } + /** + * repeated .Cell cell = 1; + * + *
+       * Result includes the Cells or else it just has a count of Cells
+       * that are carried otherwise.
+       * </pre>
+ */ public Builder setCell( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell value) { if (cellBuilder_ == null) { @@ -2525,6 +3292,14 @@ public final class ClientProtos { } return this; } + /** + * repeated .Cell cell = 1; + * + *
+       * Result includes the Cells or else it just has a count of Cells
+       * that are carried otherwise.
+       * </pre>
+ */ public Builder setCell( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell.Builder builderForValue) { if (cellBuilder_ == null) { @@ -2536,6 +3311,14 @@ public final class ClientProtos { } return this; } + /** + * repeated .Cell cell = 1; + * + *
+       * Result includes the Cells or else it just has a count of Cells
+       * that are carried otherwise.
+       * </pre>
+ */ public Builder addCell(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell value) { if (cellBuilder_ == null) { if (value == null) { @@ -2549,6 +3332,14 @@ public final class ClientProtos { } return this; } + /** + * repeated .Cell cell = 1; + * + *
+       * Result includes the Cells or else it just has a count of Cells
+       * that are carried otherwise.
+       * </pre>
+ */ public Builder addCell( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell value) { if (cellBuilder_ == null) { @@ -2563,6 +3354,14 @@ public final class ClientProtos { } return this; } + /** + * repeated .Cell cell = 1; + * + *
+       * Result includes the Cells or else it just has a count of Cells
+       * that are carried otherwise.
+       * </pre>
+ */ public Builder addCell( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell.Builder builderForValue) { if (cellBuilder_ == null) { @@ -2574,6 +3373,14 @@ public final class ClientProtos { } return this; } + /** + * repeated .Cell cell = 1; + * + *
+       * Result includes the Cells or else it just has a count of Cells
+       * that are carried otherwise.
+       * </pre>
+ */ public Builder addCell( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell.Builder builderForValue) { if (cellBuilder_ == null) { @@ -2585,6 +3392,14 @@ public final class ClientProtos { } return this; } + /** + * repeated .Cell cell = 1; + * + *
+       * Result includes the Cells or else it just has a count of Cells
+       * that are carried otherwise.
+       * </pre>
+ */ public Builder addAllCell( java.lang.Iterable values) { if (cellBuilder_ == null) { @@ -2596,6 +3411,14 @@ public final class ClientProtos { } return this; } + /** + * repeated .Cell cell = 1; + * + *
+       * Result includes the Cells or else it just has a count of Cells
+       * that are carried otherwise.
+       * </pre>
+ */ public Builder clearCell() { if (cellBuilder_ == null) { cell_ = java.util.Collections.emptyList(); @@ -2606,6 +3429,14 @@ public final class ClientProtos { } return this; } + /** + * repeated .Cell cell = 1; + * + *
+       * Result includes the Cells or else it just has a count of Cells
+       * that are carried otherwise.
+       * </pre>
+ */ public Builder removeCell(int index) { if (cellBuilder_ == null) { ensureCellIsMutable(); @@ -2616,10 +3447,26 @@ public final class ClientProtos { } return this; } + /** + * repeated .Cell cell = 1; + * + *
+       * Result includes the Cells or else it just has a count of Cells
+       * that are carried otherwise.
+       * </pre>
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell.Builder getCellBuilder( int index) { return getCellFieldBuilder().getBuilder(index); } + /** + * repeated .Cell cell = 1; + * + *
+       * Result includes the Cells or else it just has a count of Cells
+       * that are carried otherwise.
+       * </pre>
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CellOrBuilder getCellOrBuilder( int index) { if (cellBuilder_ == null) { @@ -2627,6 +3474,14 @@ public final class ClientProtos { return cellBuilder_.getMessageOrBuilder(index); } } + /** + * repeated .Cell cell = 1; + * + *
+       * Result includes the Cells or else it just has a count of Cells
+       * that are carried otherwise.
+       * </pre>
+ */ public java.util.List getCellOrBuilderList() { if (cellBuilder_ != null) { @@ -2635,15 +3490,39 @@ public final class ClientProtos { return java.util.Collections.unmodifiableList(cell_); } } + /** + * repeated .Cell cell = 1; + * + *
+       * Result includes the Cells or else it just has a count of Cells
+       * that are carried otherwise.
+       * </pre>
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell.Builder addCellBuilder() { return getCellFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell.getDefaultInstance()); } + /** + * repeated .Cell cell = 1; + * + *
+       * Result includes the Cells or else it just has a count of Cells
+       * that are carried otherwise.
+       * </pre>
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell.Builder addCellBuilder( int index) { return getCellFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell.getDefaultInstance()); } + /** + * repeated .Cell cell = 1; + * + *
+       * Result includes the Cells or else it just has a count of Cells
+       * that are carried otherwise.
+       * </pre>
+ */ public java.util.List getCellBuilderList() { return getCellFieldBuilder().getBuilderList(); @@ -2662,135 +3541,395 @@ public final class ClientProtos { } return cellBuilder_; } - + // optional int32 associatedCellCount = 2; private int associatedCellCount_ ; + /** + * optional int32 associatedCellCount = 2; + * + *
+       * The below count is set when the associated cells are
+       * not part of this protobuf message; they are passed alongside
+       * and then this Message is just a placeholder with metadata.
+       * The count is needed to know how many to peel off the block of Cells as
+       * ours.  NOTE: This is different from the pb managed cellCount of the
+       * 'cell' field above which is non-null when the cells are pb'd.
+       * </pre>
+ */ public boolean hasAssociatedCellCount() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional int32 associatedCellCount = 2; + * + *
+       * The below count is set when the associated cells are
+       * not part of this protobuf message; they are passed alongside
+       * and then this Message is just a placeholder with metadata.
+       * The count is needed to know how many to peel off the block of Cells as
+       * ours.  NOTE: This is different from the pb managed cellCount of the
+       * 'cell' field above which is non-null when the cells are pb'd.
+       * </pre>
+ */ public int getAssociatedCellCount() { return associatedCellCount_; } + /** + * optional int32 associatedCellCount = 2; + * + *
+       * The below count is set when the associated cells are
+       * not part of this protobuf message; they are passed alongside
+       * and then this Message is just a placeholder with metadata.
+       * The count is needed to know how many to peel off the block of Cells as
+       * ours.  NOTE: This is different from the pb managed cellCount of the
+       * 'cell' field above which is non-null when the cells are pb'd.
+       * </pre>
+ */ public Builder setAssociatedCellCount(int value) { bitField0_ |= 0x00000002; associatedCellCount_ = value; onChanged(); return this; } + /** + * optional int32 associatedCellCount = 2; + * + *
+       * The below count is set when the associated cells are
+       * not part of this protobuf message; they are passed alongside
+       * and then this Message is just a placeholder with metadata.
+       * The count is needed to know how many to peel off the block of Cells as
+       * ours.  NOTE: This is different from the pb managed cellCount of the
+       * 'cell' field above which is non-null when the cells are pb'd.
+       * </pre>
+ */ public Builder clearAssociatedCellCount() { bitField0_ = (bitField0_ & ~0x00000002); associatedCellCount_ = 0; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:Result) } - + static { defaultInstance = new Result(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:Result) } - + public interface GetRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .RegionSpecifier region = 1; + /** + * required .RegionSpecifier region = 1; + */ boolean hasRegion(); + /** + * required .RegionSpecifier region = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + /** + * required .RegionSpecifier region = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - + // required .Get get = 2; + /** + * required .Get get = 2; + */ boolean hasGet(); + /** + * required .Get get = 2; + */ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet(); + /** + * required .Get get = 2; + */ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder(); - + // optional bool closestRowBefore = 3; + /** + * optional bool closestRowBefore = 3; + * + *
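The associatedCellCount javadoc above describes an out-of-band cell protocol; here is a hedged sketch of the sending side, assuming the transport that actually carries the cell block lives elsewhere (class and method names are illustrative):

    import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;

    public class PlaceholderResultSketch {
      // A Result carrying no pb Cells, only the count the receiver needs
      // to peel that many Cells off the accompanying cell block.
      public static ClientProtos.Result placeholderFor(int cellsAlongside) {
        return ClientProtos.Result.newBuilder()
            .setAssociatedCellCount(cellsAlongside)
            .build();
      }
    }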
+     * If the row to get doesn't exist, return the
+     * closest row before.
+     * </pre>
+ */ boolean hasClosestRowBefore(); + /** + * optional bool closestRowBefore = 3; + * + *
+     * If the row to get doesn't exist, return the
+     * closest row before.
+     * </pre>
+ */ boolean getClosestRowBefore(); - + // optional bool existenceOnly = 4; + /** + * optional bool existenceOnly = 4; + * + *
+     * The result isn't asked for, just check for
+     * the existence. If closestRowBefore specified,
+     * this will be ignored
+     * </pre>
+ */ boolean hasExistenceOnly(); + /** + * optional bool existenceOnly = 4; + * + *
+     * The result isn't asked for, just check for
+     * the existence. If closestRowBefore specified,
+     * this will be ignored
+     * </pre>
+ */ boolean getExistenceOnly(); } + /** + * Protobuf type {@code GetRequest} + * + *
+   **
+   * The get request. Perform a single Get operation.
+   * Unless existenceOnly is specified, return all the requested data
+   * for the row that matches exactly, or the one that immediately
+   * precedes it if closestRowBefore is specified.
+   *
+   * If existenceOnly is set, only the existence will be returned.
+   * </pre>
+ */ public static final class GetRequest extends com.google.protobuf.GeneratedMessage implements GetRequestOrBuilder { // Use GetRequest.newBuilder() to construct. - private GetRequest(Builder builder) { + private GetRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private GetRequest(boolean noInit) {} - + private GetRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final GetRequest defaultInstance; public static GetRequest getDefaultInstance() { return defaultInstance; } - + public GetRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private GetRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = region_.toBuilder(); + } + region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(region_); + region_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder subBuilder = null; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + subBuilder = get_.toBuilder(); + } + get_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(get_); + get_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000002; + break; + } + case 24: { + bitField0_ |= 0x00000004; + closestRowBefore_ = input.readBool(); + break; + } + case 32: { + bitField0_ |= 0x00000008; + existenceOnly_ = input.readBool(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.Builder.class); } - + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public GetRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + private int bitField0_; // required .RegionSpecifier region = 1; public static final int REGION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + /** + * required .RegionSpecifier region = 1; + */ public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { return region_; } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { return region_; } - + // required .Get get = 2; public static final int GET_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get get_; + /** + * required .Get get = 2; + */ public boolean hasGet() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required .Get get = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet() { return get_; } + /** + * required .Get get = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder() { return get_; } - + // optional bool closestRowBefore = 3; public static final int CLOSESTROWBEFORE_FIELD_NUMBER = 3; private boolean closestRowBefore_; + /** + * optional bool closestRowBefore = 3; + * + *
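The case labels in the new parsing constructor above (10, 18, 24, 32) are precomputed wire tags, not field numbers: a tag packs (fieldNumber << 3) | wireType. A quick sketch, separate from the generated code, spelling that out:

    public class WireTagSketch {
      static int tag(int fieldNumber, int wireType) {
        return (fieldNumber << 3) | wireType;
      }
      public static void main(String[] args) {
        // wire type 2 = length-delimited, wire type 0 = varint
        System.out.println(tag(1, 2)); // 10: region
        System.out.println(tag(2, 2)); // 18: get
        System.out.println(tag(3, 0)); // 24: closestRowBefore
        System.out.println(tag(4, 0)); // 32: existenceOnly
      }
    }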
+     * If the row to get doesn't exist, return the
+     * closest row before.
+     * </pre>
+ */ public boolean hasClosestRowBefore() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional bool closestRowBefore = 3; + * + *
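The unknownFields instance field and getUnknownFields() override introduced above keep unrecognized tags attached to the parsed message, and writeTo() re-emits them, so data from a peer with a newer schema survives a round trip. A sketch of that property (names illustrative; the input is assumed to contain the required fields):

    import com.google.protobuf.InvalidProtocolBufferException;
    import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;

    public class UnknownFieldsSketch {
      // Bytes from a newer schema still parse; unrecognized fields ride
      // along in getUnknownFields() and come back out of toByteArray().
      public static byte[] roundTrip(byte[] bytesFromNewerPeer)
          throws InvalidProtocolBufferException {
        return ClientProtos.GetRequest.parseFrom(bytesFromNewerPeer).toByteArray();
      }
    }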
+     * If the row to get doesn't exist, return the
+     * closest row before.
+     * </pre>
+ */ public boolean getClosestRowBefore() { return closestRowBefore_; } - + // optional bool existenceOnly = 4; public static final int EXISTENCEONLY_FIELD_NUMBER = 4; private boolean existenceOnly_; + /** + * optional bool existenceOnly = 4; + * + *
+     * The result isn't asked for, just check for
+     * the existence. If closestRowBefore specified,
+     * this will be ignored
+     * </pre>
+ */ public boolean hasExistenceOnly() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * optional bool existenceOnly = 4; + * + *
+     * The result isn't asked for, just check for
+     * the existence. If closestRowBefore specified,
+     * this will be ignored
+     * </pre>
+ */ public boolean getExistenceOnly() { return existenceOnly_; } - + private void initFields() { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance(); @@ -2801,7 +3940,7 @@ public final class ClientProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasRegion()) { memoizedIsInitialized = 0; return false; @@ -2821,7 +3960,7 @@ public final class ClientProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -2839,12 +3978,12 @@ public final class ClientProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -2866,14 +4005,14 @@ public final class ClientProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -2883,7 +4022,7 @@ public final class ClientProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest) obj; - + boolean result = true; result = result && (hasRegion() == other.hasRegion()); if (hasRegion()) { @@ -2909,9 +4048,13 @@ public final class ClientProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasRegion()) { @@ -2931,89 +4074,89 @@ public final class ClientProtos { hash = (53 * hash) + hashBoolean(getExistenceOnly()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return 
newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code GetRequest} + * + *
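isInitialized() above still enforces the two required fields, so the PARSER entry points and build() both reject a GetRequest that lacks region and get; only buildPartial() lets one through. A small sketch of that contract (class name illustrative):

    import com.google.protobuf.UninitializedMessageException;
    import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;

    public class RequiredFieldsSketch {
      public static void main(String[] args) {
        try {
          ClientProtos.GetRequest.newBuilder().build(); // no region, no get
        } catch (UninitializedMessageException expected) {
          System.out.println(expected.getMessage());
        }
        // buildPartial() skips the check and returns an incomplete message.
        System.out.println(
            ClientProtos.GetRequest.newBuilder().buildPartial().isInitialized());
      }
    }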
+     **
+     * The get request. Perform a single Get operation.
+     * Unless existenceOnly is specified, return all the requested data
+     * for the row that matches exactly, or the one that immediately
+     * precedes it if closestRowBefore is specified.
+     *
+     * If existenceOnly is set, only the existence will be returned.
+     * </pre>
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequestOrBuilder { @@ -3021,18 +4164,21 @@ public final class ClientProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -3045,7 +4191,7 @@ public final class ClientProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (regionBuilder_ == null) { @@ -3066,20 +4212,20 @@ public final class ClientProtos { bitField0_ = (bitField0_ & ~0x00000008); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest build() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest result = buildPartial(); if (!result.isInitialized()) { @@ -3087,17 +4233,7 @@ public final class ClientProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest(this); int from_bitField0_ = bitField0_; @@ -3130,7 +4266,7 @@ public final class ClientProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest)other); @@ -3139,7 +4275,7 @@ public final class ClientProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest 
other) { if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.getDefaultInstance()) return this; if (other.hasRegion()) { @@ -3157,7 +4293,7 @@ public final class ClientProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasRegion()) { @@ -3177,71 +4313,39 @@ public final class ClientProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegion(subBuilder.buildPartial()); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.newBuilder(); - if (hasGet()) { - subBuilder.mergeFrom(getGet()); - } - input.readMessage(subBuilder, extensionRegistry); - setGet(subBuilder.buildPartial()); - break; - } - case 24: { - bitField0_ |= 0x00000004; - closestRowBefore_ = input.readBool(); - break; - } - case 32: { - bitField0_ |= 0x00000008; - existenceOnly_ = input.readBool(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .RegionSpecifier region = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + /** + * required .RegionSpecifier region = 1; + */ public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { return region_; @@ -3249,6 +4353,9 @@ public final class ClientProtos { return regionBuilder_.getMessage(); } } + /** + * required .RegionSpecifier region = 1; + */ public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (value == null) { 
@@ -3262,6 +4369,9 @@ public final class ClientProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier region = 1; + */ public Builder setRegion( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { if (regionBuilder_ == null) { @@ -3273,6 +4383,9 @@ public final class ClientProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier region = 1; + */ public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -3289,6 +4402,9 @@ public final class ClientProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier region = 1; + */ public Builder clearRegion() { if (regionBuilder_ == null) { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); @@ -3299,11 +4415,17 @@ public final class ClientProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { bitField0_ |= 0x00000001; onChanged(); return getRegionFieldBuilder().getBuilder(); } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); @@ -3311,6 +4433,9 @@ public final class ClientProtos { return region_; } } + /** + * required .RegionSpecifier region = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { @@ -3324,14 +4449,20 @@ public final class ClientProtos { } return regionBuilder_; } - + // required .Get get = 2; private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder> getBuilder_; + /** + * required .Get get = 2; + */ public boolean hasGet() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required .Get get = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet() { if (getBuilder_ == null) { return get_; @@ -3339,6 +4470,9 @@ public final class ClientProtos { return getBuilder_.getMessage(); } } + /** + * required .Get get = 2; + */ public Builder setGet(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get value) { if (getBuilder_ == null) { if (value == null) { @@ -3352,6 +4486,9 @@ public final class ClientProtos { bitField0_ |= 0x00000002; return this; } + /** + * required .Get get = 2; + */ public Builder setGet( org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder builderForValue) { if (getBuilder_ == null) { @@ -3363,6 +4500,9 @@ public final class ClientProtos { bitField0_ |= 0x00000002; return this; } + /** + * required .Get get = 2; + */ public Builder mergeGet(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get 
value) { if (getBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && @@ -3379,6 +4519,9 @@ public final class ClientProtos { bitField0_ |= 0x00000002; return this; } + /** + * required .Get get = 2; + */ public Builder clearGet() { if (getBuilder_ == null) { get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance(); @@ -3389,11 +4532,17 @@ public final class ClientProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } + /** + * required .Get get = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder getGetBuilder() { bitField0_ |= 0x00000002; onChanged(); return getGetFieldBuilder().getBuilder(); } + /** + * required .Get get = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder() { if (getBuilder_ != null) { return getBuilder_.getMessageOrBuilder(); @@ -3401,6 +4550,9 @@ public final class ClientProtos { return get_; } } + /** + * required .Get get = 2; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder> getGetFieldBuilder() { @@ -3414,169 +4566,449 @@ public final class ClientProtos { } return getBuilder_; } - + // optional bool closestRowBefore = 3; private boolean closestRowBefore_ ; + /** + * optional bool closestRowBefore = 3; + * + *
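The reworked Builder.mergeFrom above funnels through PARSER.parsePartialFrom and, in its finally block, merges whatever decoded cleanly before an error back into the builder before rethrowing, so a failed merge still leaves the good prefix behind. A hedged sketch of that contract, where corrupt stands in for bytes that fail partway through decoding:

    import com.google.protobuf.CodedInputStream;
    import java.io.IOException;
    import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;

    public class PartialMergeSketch {
      public static void mergeLeniently(ClientProtos.GetRequest.Builder builder,
          byte[] corrupt) {
        try {
          builder.mergeFrom(CodedInputStream.newInstance(corrupt));
        } catch (IOException e) {
          // Fields decoded before the failure are already merged into
          // builder by the finally block; only the bad tail is lost.
        }
      }
    }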
+       * If the row to get doesn't exist, return the
+       * closest row before.
+       * </pre>
+ */ public boolean hasClosestRowBefore() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional bool closestRowBefore = 3; + * + *
+       * If the row to get doesn't exist, return the
+       * closest row before.
+       * </pre>
+ */ public boolean getClosestRowBefore() { return closestRowBefore_; } + /** + * optional bool closestRowBefore = 3; + * + *
+       * If the row to get doesn't exist, return the
+       * closest row before.
+       * </pre>
+ */ public Builder setClosestRowBefore(boolean value) { bitField0_ |= 0x00000004; closestRowBefore_ = value; onChanged(); return this; } + /** + * optional bool closestRowBefore = 3; + * + *
+       * If the row to get doesn't exist, return the
+       * closest row before.
+       * </pre>
+ */ public Builder clearClosestRowBefore() { bitField0_ = (bitField0_ & ~0x00000004); closestRowBefore_ = false; onChanged(); return this; } - + // optional bool existenceOnly = 4; private boolean existenceOnly_ ; + /** + * optional bool existenceOnly = 4; + * + *
+       * The result isn't asked for, just check for
+       * the existence. If closestRowBefore specified,
+       * this will be ignored
+       * </pre>
+ */ public boolean hasExistenceOnly() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * optional bool existenceOnly = 4; + * + *
+       * The result isn't asked for, just check for
+       * the existence. If closestRowBefore specified,
+       * this will be ignored
+       * </pre>
+ */ public boolean getExistenceOnly() { return existenceOnly_; } + /** + * optional bool existenceOnly = 4; + * + *
+       * The result isn't asked for, just check for
+       * the existence. If closestRowBefore specified,
+       * this will be ignored
+       * </pre>
+ */ public Builder setExistenceOnly(boolean value) { bitField0_ |= 0x00000008; existenceOnly_ = value; onChanged(); return this; } + /** + * optional bool existenceOnly = 4; + * + *
+       * The result isn't asked for, just check for
+       * the existence. If closestRowBefore specified,
+       * this will be ignored
+       * </pre>
+ */ public Builder clearExistenceOnly() { bitField0_ = (bitField0_ & ~0x00000008); existenceOnly_ = false; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:GetRequest) } - + static { defaultInstance = new GetRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:GetRequest) } - + public interface MultiGetRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .RegionSpecifier region = 1; + /** + * required .RegionSpecifier region = 1; + */ boolean hasRegion(); + /** + * required .RegionSpecifier region = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + /** + * required .RegionSpecifier region = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - + // repeated .Get get = 2; + /** + * repeated .Get get = 2; + */ java.util.List getGetList(); + /** + * repeated .Get get = 2; + */ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet(int index); + /** + * repeated .Get get = 2; + */ int getGetCount(); + /** + * repeated .Get get = 2; + */ java.util.List getGetOrBuilderList(); + /** + * repeated .Get get = 2; + */ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder( int index); - + // optional bool closestRowBefore = 3; + /** + * optional bool closestRowBefore = 3; + * + *
+     * If the row to get doesn't exist, return the
+     * closest row before.
+     * </pre>
+ */ boolean hasClosestRowBefore(); + /** + * optional bool closestRowBefore = 3; + * + *
+     * If the row to get doesn't exist, return the
+     * closest row before.
+     * </pre>
+ */ boolean getClosestRowBefore(); - + // optional bool existenceOnly = 4; + /** + * optional bool existenceOnly = 4; + * + *
+     * The result isn't asked for, just check for
+     * the existence. If closestRowBefore specified,
+     * this will be ignored
+     * </pre>
+ */ boolean hasExistenceOnly(); + /** + * optional bool existenceOnly = 4; + * + *
+     * The result isn't asked for, just check for
+     * the existence. If closestRowBefore specified,
+     * this will be ignored
+     * </pre>
+ */ boolean getExistenceOnly(); } + /** + * Protobuf type {@code MultiGetRequest} + */ public static final class MultiGetRequest extends com.google.protobuf.GeneratedMessage implements MultiGetRequestOrBuilder { // Use MultiGetRequest.newBuilder() to construct. - private MultiGetRequest(Builder builder) { + private MultiGetRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private MultiGetRequest(boolean noInit) {} - + private MultiGetRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final MultiGetRequest defaultInstance; public static MultiGetRequest getDefaultInstance() { return defaultInstance; } - + public MultiGetRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private MultiGetRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = region_.toBuilder(); + } + region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(region_); + region_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + case 18: { + if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + get_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000002; + } + get_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.PARSER, extensionRegistry)); + break; + } + case 24: { + bitField0_ |= 0x00000002; + closestRowBefore_ = input.readBool(); + break; + } + case 32: { + bitField0_ |= 0x00000004; + existenceOnly_ = input.readBool(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + get_ = java.util.Collections.unmodifiableList(get_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiGetRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiGetRequest_fieldAccessorTable; + return 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiGetRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public MultiGetRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new MultiGetRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required .RegionSpecifier region = 1; public static final int REGION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + /** + * required .RegionSpecifier region = 1; + */ public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { return region_; } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { return region_; } - + // repeated .Get get = 2; public static final int GET_FIELD_NUMBER = 2; private java.util.List get_; + /** + * repeated .Get get = 2; + */ public java.util.List getGetList() { return get_; } + /** + * repeated .Get get = 2; + */ public java.util.List getGetOrBuilderList() { return get_; } + /** + * repeated .Get get = 2; + */ public int getGetCount() { return get_.size(); } + /** + * repeated .Get get = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet(int index) { return get_.get(index); } + /** + * repeated .Get get = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder( int index) { return get_.get(index); } - + // optional bool closestRowBefore = 3; public static final int CLOSESTROWBEFORE_FIELD_NUMBER = 3; private boolean closestRowBefore_; + /** + * optional bool closestRowBefore = 3; + * + *
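MultiGetRequest above batches several Gets against one region: the parsing constructor accumulates the repeated get field in an ArrayList and freezes it with Collections.unmodifiableList in its finally block. The builder side is the mirror image; a sketch, with gets standing in for an already-prepared list:

    import java.util.List;
    import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;

    public class MultiGetSketch {
      // buildPartial() is used so the sketch need not fabricate the
      // required region field; real callers must setRegion(...) too.
      public static ClientProtos.MultiGetRequest batch(List<ClientProtos.Get> gets) {
        return ClientProtos.MultiGetRequest.newBuilder()
            .addAllGet(gets)
            .setExistenceOnly(true)
            .buildPartial();
      }
    }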
+     * If the row to get doesn't exist, return the
+     * closest row before.
+     * </pre>
+ */ public boolean hasClosestRowBefore() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional bool closestRowBefore = 3; + * + *
+     * If the row to get doesn't exist, return the
+     * closest row before.
+     * </pre>
+ */ public boolean getClosestRowBefore() { return closestRowBefore_; } - + // optional bool existenceOnly = 4; public static final int EXISTENCEONLY_FIELD_NUMBER = 4; private boolean existenceOnly_; + /** + * optional bool existenceOnly = 4; + * + *
+     * The result isn't asked for, just check for
+     * the existence. If closestRowBefore specified,
+     * this will be ignored
+     * </pre>
+ */ public boolean hasExistenceOnly() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional bool existenceOnly = 4; + * + *
+     * The result isn't asked for, just check for
+     * the existence. If closestRowBefore specified,
+     * this will be ignored
+     * </pre>
+ */ public boolean getExistenceOnly() { return existenceOnly_; } - + private void initFields() { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); get_ = java.util.Collections.emptyList(); @@ -3587,7 +5019,7 @@ public final class ClientProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasRegion()) { memoizedIsInitialized = 0; return false; @@ -3605,7 +5037,7 @@ public final class ClientProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -3623,12 +5055,12 @@ public final class ClientProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -3650,14 +5082,14 @@ public final class ClientProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -3667,7 +5099,7 @@ public final class ClientProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetRequest) obj; - + boolean result = true; result = result && (hasRegion() == other.hasRegion()); if (hasRegion()) { @@ -3690,9 +5122,13 @@ public final class ClientProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasRegion()) { @@ -3712,89 +5148,79 @@ public final class ClientProtos { hash = (53 * hash) + hashBoolean(getExistenceOnly()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return 
newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code MultiGetRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetRequestOrBuilder { @@ -3802,18 +5228,21 @@ public final class ClientProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiGetRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiGetRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiGetRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetRequest.class, 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -3826,7 +5255,7 @@ public final class ClientProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (regionBuilder_ == null) { @@ -3847,20 +5276,20 @@ public final class ClientProtos { bitField0_ = (bitField0_ & ~0x00000008); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiGetRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetRequest build() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetRequest result = buildPartial(); if (!result.isInitialized()) { @@ -3868,17 +5297,7 @@ public final class ClientProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetRequest(this); int from_bitField0_ = bitField0_; @@ -3912,7 +5331,7 @@ public final class ClientProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetRequest)other); @@ -3921,7 +5340,7 @@ public final class ClientProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetRequest.getDefaultInstance()) return this; if (other.hasRegion()) { @@ -3962,7 +5381,7 @@ public final class ClientProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasRegion()) { @@ -3980,68 +5399,39 @@ public final class ClientProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - 
while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegion(subBuilder.buildPartial()); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addGet(subBuilder.buildPartial()); - break; - } - case 24: { - bitField0_ |= 0x00000004; - closestRowBefore_ = input.readBool(); - break; - } - case 32: { - bitField0_ |= 0x00000008; - existenceOnly_ = input.readBool(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .RegionSpecifier region = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + /** + * required .RegionSpecifier region = 1; + */ public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { return region_; @@ -4049,6 +5439,9 @@ public final class ClientProtos { return regionBuilder_.getMessage(); } } + /** + * required .RegionSpecifier region = 1; + */ public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (value == null) { @@ -4062,6 +5455,9 @@ public final class ClientProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier region = 1; + */ public Builder setRegion( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { if (regionBuilder_ == null) { @@ -4073,6 +5469,9 @@ public final class ClientProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier region = 1; + */ public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -4089,6 +5488,9 @@ public final class ClientProtos { bitField0_ |= 0x00000001; return this; } + /** + * required 
.RegionSpecifier region = 1; + */ public Builder clearRegion() { if (regionBuilder_ == null) { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); @@ -4099,11 +5501,17 @@ public final class ClientProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { bitField0_ |= 0x00000001; onChanged(); return getRegionFieldBuilder().getBuilder(); } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); @@ -4111,6 +5519,9 @@ public final class ClientProtos { return region_; } } + /** + * required .RegionSpecifier region = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { @@ -4124,7 +5535,7 @@ public final class ClientProtos { } return regionBuilder_; } - + // repeated .Get get = 2; private java.util.List get_ = java.util.Collections.emptyList(); @@ -4134,10 +5545,13 @@ public final class ClientProtos { bitField0_ |= 0x00000002; } } - + private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder> getBuilder_; - + + /** + * repeated .Get get = 2; + */ public java.util.List getGetList() { if (getBuilder_ == null) { return java.util.Collections.unmodifiableList(get_); @@ -4145,6 +5559,9 @@ public final class ClientProtos { return getBuilder_.getMessageList(); } } + /** + * repeated .Get get = 2; + */ public int getGetCount() { if (getBuilder_ == null) { return get_.size(); @@ -4152,6 +5569,9 @@ public final class ClientProtos { return getBuilder_.getCount(); } } + /** + * repeated .Get get = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet(int index) { if (getBuilder_ == null) { return get_.get(index); @@ -4159,6 +5579,9 @@ public final class ClientProtos { return getBuilder_.getMessage(index); } } + /** + * repeated .Get get = 2; + */ public Builder setGet( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get value) { if (getBuilder_ == null) { @@ -4173,6 +5596,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .Get get = 2; + */ public Builder setGet( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder builderForValue) { if (getBuilder_ == null) { @@ -4184,6 +5610,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .Get get = 2; + */ public Builder addGet(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get value) { if (getBuilder_ == null) { if (value == null) { @@ -4197,6 +5626,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .Get get = 2; + */ public Builder addGet( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get value) { if (getBuilder_ == null) { @@ -4211,6 +5643,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .Get get = 2; + */ public Builder addGet( 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder builderForValue) { if (getBuilder_ == null) { @@ -4222,6 +5657,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .Get get = 2; + */ public Builder addGet( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder builderForValue) { if (getBuilder_ == null) { @@ -4233,6 +5671,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .Get get = 2; + */ public Builder addAllGet( java.lang.Iterable values) { if (getBuilder_ == null) { @@ -4244,6 +5685,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .Get get = 2; + */ public Builder clearGet() { if (getBuilder_ == null) { get_ = java.util.Collections.emptyList(); @@ -4254,6 +5698,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .Get get = 2; + */ public Builder removeGet(int index) { if (getBuilder_ == null) { ensureGetIsMutable(); @@ -4264,10 +5711,16 @@ public final class ClientProtos { } return this; } + /** + * repeated .Get get = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder getGetBuilder( int index) { return getGetFieldBuilder().getBuilder(index); } + /** + * repeated .Get get = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder( int index) { if (getBuilder_ == null) { @@ -4275,6 +5728,9 @@ public final class ClientProtos { return getBuilder_.getMessageOrBuilder(index); } } + /** + * repeated .Get get = 2; + */ public java.util.List getGetOrBuilderList() { if (getBuilder_ != null) { @@ -4283,15 +5739,24 @@ public final class ClientProtos { return java.util.Collections.unmodifiableList(get_); } } + /** + * repeated .Get get = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder addGetBuilder() { return getGetFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance()); } + /** + * repeated .Get get = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder addGetBuilder( int index) { return getGetFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance()); } + /** + * repeated .Get get = 2; + */ public java.util.List getGetBuilderList() { return getGetFieldBuilder().getBuilderList(); @@ -4310,124 +5775,318 @@ public final class ClientProtos { } return getBuilder_; } - + // optional bool closestRowBefore = 3; private boolean closestRowBefore_ ; + /** + * optional bool closestRowBefore = 3; + * + *
+       * If the row to get doesn't exist, return the
+       * closest row before.
+       * </pre>
+ */ public boolean hasClosestRowBefore() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional bool closestRowBefore = 3; + * + *
+       * If the row to get doesn't exist, return the
+       * closest row before.
+       * </pre>
+ */ public boolean getClosestRowBefore() { return closestRowBefore_; } + /** + * optional bool closestRowBefore = 3; + * + *
+       * If the row to get doesn't exist, return the
+       * closest row before.
+       * </pre>
+ */ public Builder setClosestRowBefore(boolean value) { bitField0_ |= 0x00000004; closestRowBefore_ = value; onChanged(); return this; } + /** + * optional bool closestRowBefore = 3; + * + *
+       * If the row to get doesn't exist, return the
+       * closest row before.
+       * </pre>
+ */ public Builder clearClosestRowBefore() { bitField0_ = (bitField0_ & ~0x00000004); closestRowBefore_ = false; onChanged(); return this; } - + // optional bool existenceOnly = 4; private boolean existenceOnly_ ; + /** + * optional bool existenceOnly = 4; + * + *
+       * The result isn't asked for, just check for
+       * the existence. If closestRowBefore specified,
+       * this will be ignored
+       * </pre>
+ */ public boolean hasExistenceOnly() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * optional bool existenceOnly = 4; + * + *
+       * The result isn't asked for, just check for
+       * the existence. If closestRowBefore specified,
+       * this will be ignored
+       * </pre>
+ */ public boolean getExistenceOnly() { return existenceOnly_; } + /** + * optional bool existenceOnly = 4; + * + *
+       * The result isn't asked for, just check for
+       * the existence. If closestRowBefore specified,
+       * this will be ignored
+       * </pre>
+ */ public Builder setExistenceOnly(boolean value) { bitField0_ |= 0x00000008; existenceOnly_ = value; onChanged(); return this; } + /** + * optional bool existenceOnly = 4; + * + *
+       * The result isn't asked for, just check for
+       * the existence. If closestRowBefore specified,
+       * this will be ignored
+       * </pre>
+ */ public Builder clearExistenceOnly() { bitField0_ = (bitField0_ & ~0x00000008); existenceOnly_ = false; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:MultiGetRequest) } - + static { defaultInstance = new MultiGetRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:MultiGetRequest) } - + public interface GetResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // optional .Result result = 1; + /** + * optional .Result result = 1; + */ boolean hasResult(); + /** + * optional .Result result = 1; + */ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult(); + /** + * optional .Result result = 1; + */ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder(); - + // optional bool exists = 2; + /** + * optional bool exists = 2; + * + *
+     * used for Get to check existence only
+     * </pre>
+ */ boolean hasExists(); + /** + * optional bool exists = 2; + * + *
+     * used for Get to check existence only
+     * </pre>
+ */ boolean getExists(); } + /** + * Protobuf type {@code GetResponse} + */ public static final class GetResponse extends com.google.protobuf.GeneratedMessage implements GetResponseOrBuilder { // Use GetResponse.newBuilder() to construct. - private GetResponse(Builder builder) { + private GetResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private GetResponse(boolean noInit) {} - + private GetResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final GetResponse defaultInstance; public static GetResponse getDefaultInstance() { return defaultInstance; } - + public GetResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private GetResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = result_.toBuilder(); + } + result_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(result_); + result_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + case 16: { + bitField0_ |= 0x00000002; + exists_ = input.readBool(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public GetResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new 
GetResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // optional .Result result = 1; public static final int RESULT_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result_; + /** + * optional .Result result = 1; + */ public boolean hasResult() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional .Result result = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult() { return result_; } + /** + * optional .Result result = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() { return result_; } - + // optional bool exists = 2; public static final int EXISTS_FIELD_NUMBER = 2; private boolean exists_; + /** + * optional bool exists = 2; + * + *
+     * used for Get to check existence only
+     * </pre>
+ */ public boolean hasExists() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional bool exists = 2; + * + *
+     * used for Get to check existence only
+     * </pre>
+ */ public boolean getExists() { return exists_; } - + private void initFields() { result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); exists_ = false; @@ -4436,11 +6095,11 @@ public final class ClientProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -4452,12 +6111,12 @@ public final class ClientProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -4471,14 +6130,14 @@ public final class ClientProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -4488,7 +6147,7 @@ public final class ClientProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse) obj; - + boolean result = true; result = result && (hasResult() == other.hasResult()); if (hasResult()) { @@ -4504,9 +6163,13 @@ public final class ClientProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasResult()) { @@ -4518,89 +6181,79 @@ public final class ClientProtos { hash = (53 * hash) + hashBoolean(getExists()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse 
parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code GetResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponseOrBuilder { @@ -4608,18 +6261,21 @@ public final class ClientProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { 
+ + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -4631,7 +6287,7 @@ public final class ClientProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (resultBuilder_ == null) { @@ -4644,20 +6300,20 @@ public final class ClientProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse build() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse result = buildPartial(); if (!result.isInitialized()) { @@ -4665,17 +6321,7 @@ public final class ClientProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse(this); int from_bitField0_ = bitField0_; @@ -4696,7 +6342,7 @@ public final class ClientProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse)other); @@ -4705,7 +6351,7 @@ public final class ClientProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance()) return this; if (other.hasResult()) { @@ -4717,61 +6363,43 @@ public final class ClientProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder subBuilder = 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder(); - if (hasResult()) { - subBuilder.mergeFrom(getResult()); - } - input.readMessage(subBuilder, extensionRegistry); - setResult(subBuilder.buildPartial()); - break; - } - case 16: { - bitField0_ |= 0x00000002; - exists_ = input.readBool(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // optional .Result result = 1; private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> resultBuilder_; + /** + * optional .Result result = 1; + */ public boolean hasResult() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional .Result result = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult() { if (resultBuilder_ == null) { return result_; @@ -4779,6 +6407,9 @@ public final class ClientProtos { return resultBuilder_.getMessage(); } } + /** + * optional .Result result = 1; + */ public Builder setResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) { if (resultBuilder_ == null) { if (value == null) { @@ -4792,6 +6423,9 @@ public final class ClientProtos { bitField0_ |= 0x00000001; return this; } + /** + * optional .Result result = 1; + */ public Builder setResult( org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) { if (resultBuilder_ == null) { @@ -4803,6 +6437,9 @@ public final class ClientProtos { bitField0_ |= 0x00000001; return this; } + /** + * optional .Result result = 1; + */ public Builder mergeResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) { if (resultBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -4819,6 +6456,9 @@ public final class ClientProtos { bitField0_ |= 0x00000001; return this; } + /** + * optional .Result result = 1; + */ public Builder clearResult() { if (resultBuilder_ == null) { result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); @@ -4829,11 +6469,17 @@ public final class ClientProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * optional .Result result = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder getResultBuilder() { bitField0_ |= 0x00000001; onChanged(); return getResultFieldBuilder().getBuilder(); } + /** + * optional .Result result = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() { if (resultBuilder_ != null) { return resultBuilder_.getMessageOrBuilder(); @@ -4841,6 +6487,9 @@ public final class ClientProtos { return result_; } } + /** + * optional .Result result = 1; + */ private com.google.protobuf.SingleFieldBuilder< 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> getResultFieldBuilder() { @@ -4854,133 +6503,330 @@ public final class ClientProtos { } return resultBuilder_; } - + // optional bool exists = 2; private boolean exists_ ; + /** + * optional bool exists = 2; + * + *
+       * used for Get to check existence only
+       * </pre>
+ */ public boolean hasExists() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional bool exists = 2; + * + *
+       * used for Get to check existence only
+       * </pre>
+ */ public boolean getExists() { return exists_; } + /** + * optional bool exists = 2; + * + *
+       * used for Get to check existence only
+       * </pre>
+ */ public Builder setExists(boolean value) { bitField0_ |= 0x00000002; exists_ = value; onChanged(); return this; } + /** + * optional bool exists = 2; + * + *
+       * used for Get to check existence only
+       * </pre>
+ */ public Builder clearExists() { bitField0_ = (bitField0_ & ~0x00000002); exists_ = false; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:GetResponse) } - + static { defaultInstance = new GetResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:GetResponse) } - + public interface MultiGetResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // repeated .Result result = 1; + /** + * repeated .Result result = 1; + */ java.util.List getResultList(); + /** + * repeated .Result result = 1; + */ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult(int index); + /** + * repeated .Result result = 1; + */ int getResultCount(); + /** + * repeated .Result result = 1; + */ java.util.List getResultOrBuilderList(); + /** + * repeated .Result result = 1; + */ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder( int index); - + // repeated bool exists = 2; + /** + * repeated bool exists = 2; + * + *
+     * used for Get to check existence only
+     * </pre>
+ */ java.util.List getExistsList(); + /** + * repeated bool exists = 2; + * + *
+     * used for Get to check existence only
+     * </pre>
+ */ int getExistsCount(); + /** + * repeated bool exists = 2; + * + *
+     * used for Get to check existence only
+     * </pre>
+ */ boolean getExists(int index); } + /** + * Protobuf type {@code MultiGetResponse} + */ public static final class MultiGetResponse extends com.google.protobuf.GeneratedMessage implements MultiGetResponseOrBuilder { // Use MultiGetResponse.newBuilder() to construct. - private MultiGetResponse(Builder builder) { + private MultiGetResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private MultiGetResponse(boolean noInit) {} - + private MultiGetResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final MultiGetResponse defaultInstance; public static MultiGetResponse getDefaultInstance() { return defaultInstance; } - + public MultiGetResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private MultiGetResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + result_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + result_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.PARSER, extensionRegistry)); + break; + } + case 16: { + if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + exists_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000002; + } + exists_.add(input.readBool()); + break; + } + case 18: { + int length = input.readRawVarint32(); + int limit = input.pushLimit(length); + if (!((mutable_bitField0_ & 0x00000002) == 0x00000002) && input.getBytesUntilLimit() > 0) { + exists_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000002; + } + while (input.getBytesUntilLimit() > 0) { + exists_.add(input.readBool()); + } + input.popLimit(limit); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + result_ = java.util.Collections.unmodifiableList(result_); + } + if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + exists_ = java.util.Collections.unmodifiableList(exists_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiGetResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiGetResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiGetResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public MultiGetResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new MultiGetResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + // repeated .Result result = 1; public static final int RESULT_FIELD_NUMBER = 1; private java.util.List result_; + /** + * repeated .Result result = 1; + */ public java.util.List getResultList() { return result_; } + /** + * repeated .Result result = 1; + */ public java.util.List getResultOrBuilderList() { return result_; } + /** + * repeated .Result result = 1; + */ public int getResultCount() { return result_.size(); } + /** + * repeated .Result result = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult(int index) { return result_.get(index); } + /** + * repeated .Result result = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder( int index) { return result_.get(index); } - + // repeated bool exists = 2; public static final int EXISTS_FIELD_NUMBER = 2; private java.util.List exists_; + /** + * repeated bool exists = 2; + * + *
+     * used for Get to check existence only
+     * </pre>
+ */ public java.util.List getExistsList() { return exists_; } + /** + * repeated bool exists = 2; + * + *
+     * used for Get to check existence only
+     * </pre>
+ */ public int getExistsCount() { return exists_.size(); } + /** + * repeated bool exists = 2; + * + *
+     * used for Get to check existence only
+     * </pre>
+ */ public boolean getExists(int index) { return exists_.get(index); } - + private void initFields() { result_ = java.util.Collections.emptyList(); - exists_ = java.util.Collections.emptyList();; + exists_ = java.util.Collections.emptyList(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -4992,12 +6838,12 @@ public final class ClientProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; for (int i = 0; i < result_.size(); i++) { size += com.google.protobuf.CodedOutputStream @@ -5013,14 +6859,14 @@ public final class ClientProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -5030,7 +6876,7 @@ public final class ClientProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetResponse) obj; - + boolean result = true; result = result && getResultList() .equals(other.getResultList()); @@ -5040,9 +6886,13 @@ public final class ClientProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (getResultCount() > 0) { @@ -5054,89 +6904,79 @@ public final class ClientProtos { hash = (53 * hash) + getExistsList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code MultiGetResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetResponseOrBuilder { @@ -5144,18 +6984,21 @@ public final class ClientProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiGetResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiGetResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiGetResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetResponse.Builder.class); } - + // Construct using 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -5167,7 +7010,7 @@ public final class ClientProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (resultBuilder_ == null) { @@ -5176,24 +7019,24 @@ public final class ClientProtos { } else { resultBuilder_.clear(); } - exists_ = java.util.Collections.emptyList();; + exists_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiGetResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetResponse build() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetResponse result = buildPartial(); if (!result.isInitialized()) { @@ -5201,17 +7044,7 @@ public final class ClientProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetResponse(this); int from_bitField0_ = bitField0_; @@ -5232,7 +7065,7 @@ public final class ClientProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetResponse)other); @@ -5241,7 +7074,7 @@ public final class ClientProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetResponse.getDefaultInstance()) return this; if (resultBuilder_ == null) { @@ -5283,60 +7116,30 @@ public final class ClientProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = 
input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addResult(subBuilder.buildPartial()); - break; - } - case 16: { - ensureExistsIsMutable(); - exists_.add(input.readBool()); - break; - } - case 18: { - int length = input.readRawVarint32(); - int limit = input.pushLimit(length); - while (input.getBytesUntilLimit() > 0) { - addExists(input.readBool()); - } - input.popLimit(limit); - break; - } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // repeated .Result result = 1; private java.util.List result_ = java.util.Collections.emptyList(); @@ -5346,10 +7149,13 @@ public final class ClientProtos { bitField0_ |= 0x00000001; } } - + private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> resultBuilder_; - + + /** + * repeated .Result result = 1; + */ public java.util.List getResultList() { if (resultBuilder_ == null) { return java.util.Collections.unmodifiableList(result_); @@ -5357,6 +7163,9 @@ public final class ClientProtos { return resultBuilder_.getMessageList(); } } + /** + * repeated .Result result = 1; + */ public int getResultCount() { if (resultBuilder_ == null) { return result_.size(); @@ -5364,6 +7173,9 @@ public final class ClientProtos { return resultBuilder_.getCount(); } } + /** + * repeated .Result result = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult(int index) { if (resultBuilder_ == null) { return result_.get(index); @@ -5371,6 +7183,9 @@ public final class ClientProtos { return resultBuilder_.getMessage(index); } } + /** + * repeated .Result result = 1; + */ public Builder setResult( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) { if (resultBuilder_ == null) { @@ -5385,6 +7200,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .Result result = 1; + */ public Builder setResult( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) { if (resultBuilder_ == null) { @@ -5396,6 +7214,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .Result result = 1; + */ public Builder addResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) { if (resultBuilder_ == null) { if (value == null) { @@ -5409,6 +7230,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .Result result = 1; + */ public Builder addResult( int index, 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) { if (resultBuilder_ == null) { @@ -5423,6 +7247,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .Result result = 1; + */ public Builder addResult( org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) { if (resultBuilder_ == null) { @@ -5434,6 +7261,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .Result result = 1; + */ public Builder addResult( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) { if (resultBuilder_ == null) { @@ -5445,6 +7275,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .Result result = 1; + */ public Builder addAllResult( java.lang.Iterable values) { if (resultBuilder_ == null) { @@ -5456,6 +7289,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .Result result = 1; + */ public Builder clearResult() { if (resultBuilder_ == null) { result_ = java.util.Collections.emptyList(); @@ -5466,6 +7302,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .Result result = 1; + */ public Builder removeResult(int index) { if (resultBuilder_ == null) { ensureResultIsMutable(); @@ -5476,10 +7315,16 @@ public final class ClientProtos { } return this; } + /** + * repeated .Result result = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder getResultBuilder( int index) { return getResultFieldBuilder().getBuilder(index); } + /** + * repeated .Result result = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder( int index) { if (resultBuilder_ == null) { @@ -5487,6 +7332,9 @@ public final class ClientProtos { return resultBuilder_.getMessageOrBuilder(index); } } + /** + * repeated .Result result = 1; + */ public java.util.List getResultOrBuilderList() { if (resultBuilder_ != null) { @@ -5495,15 +7343,24 @@ public final class ClientProtos { return java.util.Collections.unmodifiableList(result_); } } + /** + * repeated .Result result = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder addResultBuilder() { return getResultFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance()); } + /** + * repeated .Result result = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder addResultBuilder( int index) { return getResultFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance()); } + /** + * repeated .Result result = 1; + */ public java.util.List getResultBuilderList() { return getResultFieldBuilder().getBuilderList(); @@ -5522,25 +7379,53 @@ public final class ClientProtos { } return resultBuilder_; } - + // repeated bool exists = 2; - private java.util.List exists_ = java.util.Collections.emptyList();; + private java.util.List exists_ = java.util.Collections.emptyList(); private void ensureExistsIsMutable() { if (!((bitField0_ & 0x00000002) == 0x00000002)) { exists_ = new java.util.ArrayList(exists_); bitField0_ |= 0x00000002; } } + /** + * repeated bool exists = 2; + * + *
+       * used for Get to check existence only
+       * 
+ */ public java.util.List getExistsList() { return java.util.Collections.unmodifiableList(exists_); } + /** + * repeated bool exists = 2; + * + *
+       * used for Get to check existence only
+       * 
+ */ public int getExistsCount() { return exists_.size(); } + /** + * repeated bool exists = 2; + * + *
+       * used for Get to check existence only
+       * 
+ */ public boolean getExists(int index) { return exists_.get(index); } + /** + * repeated bool exists = 2; + * + *
+       * used for Get to check existence only
+       * 
+ */ public Builder setExists( int index, boolean value) { ensureExistsIsMutable(); @@ -5548,12 +7433,26 @@ public final class ClientProtos { onChanged(); return this; } + /** + * repeated bool exists = 2; + * + *
+       * used for Get to check existence only
+       * 
+ */ public Builder addExists(boolean value) { ensureExistsIsMutable(); exists_.add(value); onChanged(); return this; } + /** + * repeated bool exists = 2; + * + *
+       * used for Get to check existence only
+       * 
+ */ public Builder addAllExists( java.lang.Iterable values) { ensureExistsIsMutable(); @@ -5561,130 +7460,312 @@ public final class ClientProtos { onChanged(); return this; } + /** + * repeated bool exists = 2; + * + *
+       * used for Get to check existence only
+       * 
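A minimal sketch of consuming the existence-only flags described in the comment above, using the static PARSER this patch introduces on MultiGetResponse; the wrapper class, method name, and responseBytes argument are illustrative assumptions, not part of the patch:

import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;

class ExistsSketch {
  // Parse with the generated PARSER rather than the removed
  // newBuilder().mergeFrom(data).buildParsed() path.
  static void printExistence(byte[] responseBytes) throws Exception {
    ClientProtos.MultiGetResponse resp =
        ClientProtos.MultiGetResponse.PARSER.parseFrom(responseBytes);
    // One boolean per Get when the request asked for existence checks only.
    for (int i = 0; i < resp.getExistsCount(); i++) {
      System.out.println("get[" + i + "] " + (resp.getExists(i) ? "exists" : "absent"));
    }
  }
}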
+ */ public Builder clearExists() { - exists_ = java.util.Collections.emptyList();; + exists_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:MultiGetResponse) } - + static { defaultInstance = new MultiGetResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:MultiGetResponse) } - + public interface ConditionOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required bytes row = 1; + /** + * required bytes row = 1; + */ boolean hasRow(); + /** + * required bytes row = 1; + */ com.google.protobuf.ByteString getRow(); - + // required bytes family = 2; + /** + * required bytes family = 2; + */ boolean hasFamily(); + /** + * required bytes family = 2; + */ com.google.protobuf.ByteString getFamily(); - + // required bytes qualifier = 3; + /** + * required bytes qualifier = 3; + */ boolean hasQualifier(); + /** + * required bytes qualifier = 3; + */ com.google.protobuf.ByteString getQualifier(); - + // required .CompareType compareType = 4; + /** + * required .CompareType compareType = 4; + */ boolean hasCompareType(); + /** + * required .CompareType compareType = 4; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareType(); - + // required .Comparator comparator = 5; + /** + * required .Comparator comparator = 5; + */ boolean hasComparator(); + /** + * required .Comparator comparator = 5; + */ org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator(); + /** + * required .Comparator comparator = 5; + */ org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder(); } + /** + * Protobuf type {@code Condition} + * + *
+   **
+   * Condition to check if the value of a given cell (row,
+   * family, qualifier) matches a value via a given comparator.
+   *
+   * Condition is used in check and mutate operations.
+   * 
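As a hedged illustration of the check-and-mutate semantics just described, a fully populated Condition might be assembled as below. CompareType.EQUAL and the Comparator name field are assumptions about HBaseProtos and ComparatorProtos that this hunk does not show:

import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;

class ConditionSketch {
  static ClientProtos.Condition equalsCheck() {
    // All five fields are required (see isInitialized() below);
    // build() throws if any of them is left unset.
    return ClientProtos.Condition.newBuilder()
        .setRow(ByteString.copyFromUtf8("row1"))
        .setFamily(ByteString.copyFromUtf8("cf"))
        .setQualifier(ByteString.copyFromUtf8("q"))
        .setCompareType(HBaseProtos.CompareType.EQUAL)  // assumed enum constant
        .setComparator(ComparatorProtos.Comparator.newBuilder()
            .setName("org.apache.hadoop.hbase.filter.BinaryComparator")  // assumed field
            .build())
        .build();
  }
}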
+ */ public static final class Condition extends com.google.protobuf.GeneratedMessage implements ConditionOrBuilder { // Use Condition.newBuilder() to construct. - private Condition(Builder builder) { + private Condition(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private Condition(boolean noInit) {} - + private Condition(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final Condition defaultInstance; public static Condition getDefaultInstance() { return defaultInstance; } - + public Condition getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private Condition( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + row_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + family_ = input.readBytes(); + break; + } + case 26: { + bitField0_ |= 0x00000004; + qualifier_ = input.readBytes(); + break; + } + case 32: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType value = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(4, rawValue); + } else { + bitField0_ |= 0x00000008; + compareType_ = value; + } + break; + } + case 42: { + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder subBuilder = null; + if (((bitField0_ & 0x00000010) == 0x00000010)) { + subBuilder = comparator_.toBuilder(); + } + comparator_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(comparator_); + comparator_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000010; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Condition_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Condition_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Condition_fieldAccessorTable + .ensureFieldAccessorsInitialized( + 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public Condition parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new Condition(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required bytes row = 1; public static final int ROW_FIELD_NUMBER = 1; private com.google.protobuf.ByteString row_; + /** + * required bytes row = 1; + */ public boolean hasRow() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes row = 1; + */ public com.google.protobuf.ByteString getRow() { return row_; } - + // required bytes family = 2; public static final int FAMILY_FIELD_NUMBER = 2; private com.google.protobuf.ByteString family_; + /** + * required bytes family = 2; + */ public boolean hasFamily() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required bytes family = 2; + */ public com.google.protobuf.ByteString getFamily() { return family_; } - + // required bytes qualifier = 3; public static final int QUALIFIER_FIELD_NUMBER = 3; private com.google.protobuf.ByteString qualifier_; + /** + * required bytes qualifier = 3; + */ public boolean hasQualifier() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * required bytes qualifier = 3; + */ public com.google.protobuf.ByteString getQualifier() { return qualifier_; } - + // required .CompareType compareType = 4; public static final int COMPARETYPE_FIELD_NUMBER = 4; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType compareType_; + /** + * required .CompareType compareType = 4; + */ public boolean hasCompareType() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * required .CompareType compareType = 4; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareType() { return compareType_; } - + // required .Comparator comparator = 5; public static final int COMPARATOR_FIELD_NUMBER = 5; private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator comparator_; + /** + * required .Comparator comparator = 5; + */ public boolean hasComparator() { return ((bitField0_ & 0x00000010) == 0x00000010); } + /** + * required .Comparator comparator = 5; + */ public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator() { return comparator_; } + /** + * required .Comparator comparator = 5; + */ public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder() { return comparator_; } - + private void initFields() { row_ = com.google.protobuf.ByteString.EMPTY; family_ = com.google.protobuf.ByteString.EMPTY; @@ -5696,7 +7777,7 @@ public final class ClientProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasRow()) { memoizedIsInitialized = 0; return false; @@ -5724,7 +7805,7 @@ public final class ClientProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -5745,12 +7826,12 @@ 
public final class ClientProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -5776,14 +7857,14 @@ public final class ClientProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -5793,7 +7874,7 @@ public final class ClientProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition) obj; - + boolean result = true; result = result && (hasRow() == other.hasRow()); if (hasRow()) { @@ -5824,9 +7905,13 @@ public final class ClientProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasRow()) { @@ -5850,89 +7935,87 @@ public final class ClientProtos { hash = (53 * hash) + getComparator().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseDelimitedFrom(java.io.InputStream 
input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code Condition} + * + *
+     **
+     * Condition to check if the value of a given cell (row,
+     * family, qualifier) matches a value via a given comparator.
+     *
+     * Condition is used in check and mutate operations.
+     * 
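The static parse entry points rewritten just above all funnel into the new shared PARSER; a before/after sketch, assuming data holds a serialized Condition:

import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;

class ConditionParseSketch {
  static ClientProtos.Condition parse(byte[] data)
      throws InvalidProtocolBufferException {
    // Before this patch: Condition.newBuilder().mergeFrom(data).buildParsed()
    // After: one shared Parser instance does the work, and on failure the
    // partially read message is still reachable via e.getUnfinishedMessage().
    return ClientProtos.Condition.PARSER.parseFrom(data);
  }
}

The same substitution removes the per-call Builder allocation that the old private buildParsed() helper required.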
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder { @@ -5940,18 +8023,21 @@ public final class ClientProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Condition_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Condition_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Condition_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -5963,7 +8049,7 @@ public final class ClientProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); row_ = com.google.protobuf.ByteString.EMPTY; @@ -5982,20 +8068,20 @@ public final class ClientProtos { bitField0_ = (bitField0_ & ~0x00000010); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Condition_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition build() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition result = buildPartial(); if (!result.isInitialized()) { @@ -6003,17 +8089,7 @@ public final class ClientProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition(this); int from_bitField0_ = bitField0_; @@ -6046,7 +8122,7 @@ public final class ClientProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition)other); @@ -6055,7 +8131,7 @@ public final class ClientProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition other) 
{ if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance()) return this; if (other.hasRow()) { @@ -6076,7 +8152,7 @@ public final class ClientProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasRow()) { @@ -6104,79 +8180,43 @@ public final class ClientProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - row_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - family_ = input.readBytes(); - break; - } - case 26: { - bitField0_ |= 0x00000004; - qualifier_ = input.readBytes(); - break; - } - case 32: { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType value = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(4, rawValue); - } else { - bitField0_ |= 0x00000008; - compareType_ = value; - } - break; - } - case 42: { - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.newBuilder(); - if (hasComparator()) { - subBuilder.mergeFrom(getComparator()); - } - input.readMessage(subBuilder, extensionRegistry); - setComparator(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required bytes row = 1; private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes row = 1; + */ public boolean hasRow() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes row = 1; + */ public com.google.protobuf.ByteString getRow() { return row_; } + /** + * required bytes row = 1; + */ public Builder setRow(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -6186,21 +8226,33 @@ public final class ClientProtos { onChanged(); return this; } + /** + * required bytes row = 1; + */ public Builder clearRow() { bitField0_ = (bitField0_ & ~0x00000001); row_ = getDefaultInstance().getRow(); onChanged(); return this; } - + // required bytes family = 2; private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes family = 2; + */ public boolean hasFamily() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required bytes family = 2; + */ public 
com.google.protobuf.ByteString getFamily() { return family_; } + /** + * required bytes family = 2; + */ public Builder setFamily(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -6210,21 +8262,33 @@ public final class ClientProtos { onChanged(); return this; } + /** + * required bytes family = 2; + */ public Builder clearFamily() { bitField0_ = (bitField0_ & ~0x00000002); family_ = getDefaultInstance().getFamily(); onChanged(); return this; } - + // required bytes qualifier = 3; private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes qualifier = 3; + */ public boolean hasQualifier() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * required bytes qualifier = 3; + */ public com.google.protobuf.ByteString getQualifier() { return qualifier_; } + /** + * required bytes qualifier = 3; + */ public Builder setQualifier(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -6234,21 +8298,33 @@ public final class ClientProtos { onChanged(); return this; } + /** + * required bytes qualifier = 3; + */ public Builder clearQualifier() { bitField0_ = (bitField0_ & ~0x00000004); qualifier_ = getDefaultInstance().getQualifier(); onChanged(); return this; } - + // required .CompareType compareType = 4; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType compareType_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS; + /** + * required .CompareType compareType = 4; + */ public boolean hasCompareType() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * required .CompareType compareType = 4; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareType() { return compareType_; } + /** + * required .CompareType compareType = 4; + */ public Builder setCompareType(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType value) { if (value == null) { throw new NullPointerException(); @@ -6258,20 +8334,29 @@ public final class ClientProtos { onChanged(); return this; } + /** + * required .CompareType compareType = 4; + */ public Builder clearCompareType() { bitField0_ = (bitField0_ & ~0x00000008); compareType_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS; onChanged(); return this; } - + // required .Comparator comparator = 5; private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder> comparatorBuilder_; + /** + * required .Comparator comparator = 5; + */ public boolean hasComparator() { return ((bitField0_ & 0x00000010) == 0x00000010); } + /** + * required .Comparator comparator = 5; + */ public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator() { if (comparatorBuilder_ == null) { return comparator_; @@ -6279,6 +8364,9 @@ public final class ClientProtos { return comparatorBuilder_.getMessage(); } } + /** + * required .Comparator comparator = 5; + */ public Builder setComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator value) { if (comparatorBuilder_ == 
null) { if (value == null) { @@ -6292,6 +8380,9 @@ public final class ClientProtos { bitField0_ |= 0x00000010; return this; } + /** + * required .Comparator comparator = 5; + */ public Builder setComparator( org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder builderForValue) { if (comparatorBuilder_ == null) { @@ -6303,6 +8394,9 @@ public final class ClientProtos { bitField0_ |= 0x00000010; return this; } + /** + * required .Comparator comparator = 5; + */ public Builder mergeComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator value) { if (comparatorBuilder_ == null) { if (((bitField0_ & 0x00000010) == 0x00000010) && @@ -6319,6 +8413,9 @@ public final class ClientProtos { bitField0_ |= 0x00000010; return this; } + /** + * required .Comparator comparator = 5; + */ public Builder clearComparator() { if (comparatorBuilder_ == null) { comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); @@ -6329,11 +8426,17 @@ public final class ClientProtos { bitField0_ = (bitField0_ & ~0x00000010); return this; } + /** + * required .Comparator comparator = 5; + */ public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder getComparatorBuilder() { bitField0_ |= 0x00000010; onChanged(); return getComparatorFieldBuilder().getBuilder(); } + /** + * required .Comparator comparator = 5; + */ public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder() { if (comparatorBuilder_ != null) { return comparatorBuilder_.getMessageOrBuilder(); @@ -6341,6 +8444,9 @@ public final class ClientProtos { return comparator_; } } + /** + * required .Comparator comparator = 5; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder> getComparatorFieldBuilder() { @@ -6354,110 +8460,376 @@ public final class ClientProtos { } return comparatorBuilder_; } - + // @@protoc_insertion_point(builder_scope:Condition) } - + static { defaultInstance = new Condition(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:Condition) } - + public interface MutationProtoOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // optional bytes row = 1; + /** + * optional bytes row = 1; + */ boolean hasRow(); + /** + * optional bytes row = 1; + */ com.google.protobuf.ByteString getRow(); - + // optional .MutationProto.MutationType mutateType = 2; + /** + * optional .MutationProto.MutationType mutateType = 2; + */ boolean hasMutateType(); + /** + * optional .MutationProto.MutationType mutateType = 2; + */ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType getMutateType(); - + // repeated .MutationProto.ColumnValue columnValue = 3; + /** + * repeated .MutationProto.ColumnValue columnValue = 3; + */ java.util.List getColumnValueList(); + /** + * repeated .MutationProto.ColumnValue columnValue = 3; + */ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue getColumnValue(int index); + /** + * repeated .MutationProto.ColumnValue columnValue = 3; + */ int getColumnValueCount(); + /** + * repeated .MutationProto.ColumnValue columnValue = 3; + */ java.util.List getColumnValueOrBuilderList(); + /** + * repeated .MutationProto.ColumnValue columnValue = 3; + 
*/ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder getColumnValueOrBuilder( int index); - + // optional uint64 timestamp = 4; + /** + * optional uint64 timestamp = 4; + */ boolean hasTimestamp(); + /** + * optional uint64 timestamp = 4; + */ long getTimestamp(); - + // repeated .NameBytesPair attribute = 5; + /** + * repeated .NameBytesPair attribute = 5; + */ java.util.List getAttributeList(); + /** + * repeated .NameBytesPair attribute = 5; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index); + /** + * repeated .NameBytesPair attribute = 5; + */ int getAttributeCount(); + /** + * repeated .NameBytesPair attribute = 5; + */ java.util.List getAttributeOrBuilderList(); + /** + * repeated .NameBytesPair attribute = 5; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( int index); - + // optional bool writeToWAL = 6 [default = true]; + /** + * optional bool writeToWAL = 6 [default = true]; + */ boolean hasWriteToWAL(); + /** + * optional bool writeToWAL = 6 [default = true]; + */ boolean getWriteToWAL(); - + // optional .TimeRange timeRange = 7; + /** + * optional .TimeRange timeRange = 7; + * + *
+     * For some mutations, a result may be returned, in which case a
+     * time range can be specified for a potential performance gain
+     * 
+ */ boolean hasTimeRange(); + /** + * optional .TimeRange timeRange = 7; + * + *
+     * For some mutations, a result may be returned, in which case a
+     * time range can be specified for a potential performance gain
+     * 
+ */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange(); + /** + * optional .TimeRange timeRange = 7; + * + *
+     * For some mutations, a result may be returned, in which case a
+     * time range can be specified for a potential performance gain
+     * 
+ */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder(); - + // optional int32 associatedCellCount = 8; + /** + * optional int32 associatedCellCount = 8; + * + *
+     * The below count is set when the associated cells are NOT
+     * part of this protobuf message; they are passed alongside
+     * and then this Message is a placeholder with metadata.  The
+     * count is needed to know how many to peel off the block of Cells as
+     * ours.  NOTE: This is different from the pb managed cellCount of the
+     * 'cell' field above, which is non-null when the cells are pb'd.
+     * 
+ */ boolean hasAssociatedCellCount(); + /** + * optional int32 associatedCellCount = 8; + * + *
+     * The below count is set when the associated cells are NOT
+     * part of this protobuf message; they are passed alongside
+     * and then this Message is a placeholder with metadata.  The
+     * count is needed to know how many to peel off the block of Cells as
+     * ours.  NOTE: This is different from the pb managed cellCount of the
+     * 'cell' field above, which is non-null when the cells are pb'd.
+     * 
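A sketch of how a receiver might honor this count when cells travel outside the message; the CellSource interface is a hypothetical stand-in for HBase's side-channel cell block, which this hunk does not show:

import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;

class CellCountSketch {
  interface CellSource { boolean advance(); }  // hypothetical out-of-band cell iterator

  static void claimAssociatedCells(ClientProtos.MutationProto proto, CellSource cells) {
    // The cells themselves were shipped alongside the protobuf; the count
    // says how many of them belong to this (metadata-only) mutation.
    int n = proto.hasAssociatedCellCount() ? proto.getAssociatedCellCount() : 0;
    for (int i = 0; i < n; i++) {
      if (!cells.advance()) {
        throw new IllegalStateException("cell block shorter than associatedCellCount");
      }
    }
  }
}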
+ */ int getAssociatedCellCount(); } + /** + * Protobuf type {@code MutationProto} + * + *
+   **
+   * A specific mutation inside a mutate request.
+   * It can be an append, increment, put or delete based
+   * on the mutation type.  It can be fully filled in, or have
+   * only metadata present because the data is carried
+   * elsewhere, outside of pb.
+   * 
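To make the fully-filled-in variant concrete, a hedged sketch of an inline PUT; the builder methods follow the standard protoc naming pattern, of which only the read-side counterparts appear in this hunk:

import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto;

class PutSketch {
  static MutationProto inlinePut() {
    // Row, type, and cell data all live inside the message; a metadata-only
    // variant would instead set associatedCellCount and ship cells alongside.
    return MutationProto.newBuilder()
        .setRow(ByteString.copyFromUtf8("row1"))
        .setMutateType(MutationProto.MutationType.PUT)
        .addColumnValue(MutationProto.ColumnValue.newBuilder()
            .setFamily(ByteString.copyFromUtf8("cf"))
            .addQualifierValue(MutationProto.ColumnValue.QualifierValue.newBuilder()
                .setQualifier(ByteString.copyFromUtf8("q"))
                .setValue(ByteString.copyFromUtf8("v"))))
        .build();
  }
}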
+ */ public static final class MutationProto extends com.google.protobuf.GeneratedMessage implements MutationProtoOrBuilder { // Use MutationProto.newBuilder() to construct. - private MutationProto(Builder builder) { + private MutationProto(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private MutationProto(boolean noInit) {} - + private MutationProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final MutationProto defaultInstance; public static MutationProto getDefaultInstance() { return defaultInstance; } - + public MutationProto getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private MutationProto( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + row_ = input.readBytes(); + break; + } + case 16: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType value = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(2, rawValue); + } else { + bitField0_ |= 0x00000002; + mutateType_ = value; + } + break; + } + case 26: { + if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { + columnValue_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000004; + } + columnValue_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.PARSER, extensionRegistry)); + break; + } + case 32: { + bitField0_ |= 0x00000004; + timestamp_ = input.readUInt64(); + break; + } + case 42: { + if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) { + attribute_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000010; + } + attribute_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry)); + break; + } + case 48: { + bitField0_ |= 0x00000008; + writeToWAL_ = input.readBool(); + break; + } + case 58: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder subBuilder = null; + if (((bitField0_ & 0x00000010) == 0x00000010)) { + subBuilder = timeRange_.toBuilder(); + } + timeRange_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(timeRange_); + timeRange_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000010; + break; + } + case 64: { + bitField0_ |= 0x00000020; + associatedCellCount_ = input.readInt32(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + 
throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { + columnValue_ = java.util.Collections.unmodifiableList(columnValue_); + } + if (((mutable_bitField0_ & 0x00000010) == 0x00000010)) { + attribute_ = java.util.Collections.unmodifiableList(attribute_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public MutationProto parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new MutationProto(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + + /** + * Protobuf enum {@code MutationProto.MutationType} + */ public enum MutationType implements com.google.protobuf.ProtocolMessageEnum { + /** + * APPEND = 0; + */ APPEND(0, 0), + /** + * INCREMENT = 1; + */ INCREMENT(1, 1), + /** + * PUT = 2; + */ PUT(2, 2), + /** + * DELETE = 3; + */ DELETE(3, 3), ; - + + /** + * APPEND = 0; + */ public static final int APPEND_VALUE = 0; + /** + * INCREMENT = 1; + */ public static final int INCREMENT_VALUE = 1; + /** + * PUT = 2; + */ public static final int PUT_VALUE = 2; + /** + * DELETE = 3; + */ public static final int DELETE_VALUE = 3; - - + + public final int getNumber() { return value; } - + public static MutationType valueOf(int value) { switch (value) { case 0: return APPEND; @@ -6467,7 +8839,7 @@ public final class ClientProtos { default: return null; } } - + public static com.google.protobuf.Internal.EnumLiteMap internalGetValueMap() { return internalValueMap; @@ -6479,7 +8851,7 @@ public final class ClientProtos { return MutationType.valueOf(number); } }; - + public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(index); @@ -6492,11 +8864,9 @@ public final class ClientProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDescriptor().getEnumTypes().get(0); } - - private static final MutationType[] VALUES = { - APPEND, INCREMENT, PUT, DELETE, - }; - + + private static final MutationType[] VALUES = values(); + public static MutationType valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { @@ -6505,32 +8875,53 @@ public final class ClientProtos { } return VALUES[desc.getIndex()]; } - + private final int index; private final int value; - + private MutationType(int index, int value) { this.index = index; this.value = value; 
} - + // @@protoc_insertion_point(enum_scope:MutationProto.MutationType) } - + + /** + * Protobuf enum {@code MutationProto.DeleteType} + */ public enum DeleteType implements com.google.protobuf.ProtocolMessageEnum { + /** + * DELETE_ONE_VERSION = 0; + */ DELETE_ONE_VERSION(0, 0), + /** + * DELETE_MULTIPLE_VERSIONS = 1; + */ DELETE_MULTIPLE_VERSIONS(1, 1), + /** + * DELETE_FAMILY = 2; + */ DELETE_FAMILY(2, 2), ; - + + /** + * DELETE_ONE_VERSION = 0; + */ public static final int DELETE_ONE_VERSION_VALUE = 0; + /** + * DELETE_MULTIPLE_VERSIONS = 1; + */ public static final int DELETE_MULTIPLE_VERSIONS_VALUE = 1; + /** + * DELETE_FAMILY = 2; + */ public static final int DELETE_FAMILY_VALUE = 2; - - + + public final int getNumber() { return value; } - + public static DeleteType valueOf(int value) { switch (value) { case 0: return DELETE_ONE_VERSION; @@ -6539,7 +8930,7 @@ public final class ClientProtos { default: return null; } } - + public static com.google.protobuf.Internal.EnumLiteMap internalGetValueMap() { return internalValueMap; @@ -6551,7 +8942,7 @@ public final class ClientProtos { return DeleteType.valueOf(number); } }; - + public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(index); @@ -6564,11 +8955,9 @@ public final class ClientProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDescriptor().getEnumTypes().get(1); } - - private static final DeleteType[] VALUES = { - DELETE_ONE_VERSION, DELETE_MULTIPLE_VERSIONS, DELETE_FAMILY, - }; - + + private static final DeleteType[] VALUES = values(); + public static DeleteType valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { @@ -6577,151 +8966,386 @@ public final class ClientProtos { } return VALUES[desc.getIndex()]; } - + private final int index; private final int value; - + private DeleteType(int index, int value) { this.index = index; this.value = value; } - + // @@protoc_insertion_point(enum_scope:MutationProto.DeleteType) } - + public interface ColumnValueOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required bytes family = 1; + /** + * required bytes family = 1; + */ boolean hasFamily(); + /** + * required bytes family = 1; + */ com.google.protobuf.ByteString getFamily(); - + // repeated .MutationProto.ColumnValue.QualifierValue qualifierValue = 2; + /** + * repeated .MutationProto.ColumnValue.QualifierValue qualifierValue = 2; + */ java.util.List getQualifierValueList(); + /** + * repeated .MutationProto.ColumnValue.QualifierValue qualifierValue = 2; + */ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue getQualifierValue(int index); + /** + * repeated .MutationProto.ColumnValue.QualifierValue qualifierValue = 2; + */ int getQualifierValueCount(); + /** + * repeated .MutationProto.ColumnValue.QualifierValue qualifierValue = 2; + */ java.util.List getQualifierValueOrBuilderList(); + /** + * repeated .MutationProto.ColumnValue.QualifierValue qualifierValue = 2; + */ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder getQualifierValueOrBuilder( int index); } + /** + * Protobuf type {@code MutationProto.ColumnValue} + */ public static final class ColumnValue extends com.google.protobuf.GeneratedMessage implements ColumnValueOrBuilder { // Use ColumnValue.newBuilder() to construct. 
- private ColumnValue(Builder builder) { + private ColumnValue(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private ColumnValue(boolean noInit) {} - + private ColumnValue(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final ColumnValue defaultInstance; public static ColumnValue getDefaultInstance() { return defaultInstance; } - + public ColumnValue getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ColumnValue( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + family_ = input.readBytes(); + break; + } + case 18: { + if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + qualifierValue_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000002; + } + qualifierValue_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.PARSER, extensionRegistry)); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + qualifierValue_ = java.util.Collections.unmodifiableList(qualifierValue_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_ColumnValue_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_ColumnValue_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_ColumnValue_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public ColumnValue parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ColumnValue(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + public 
interface QualifierValueOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // optional bytes qualifier = 1; + /** + * optional bytes qualifier = 1; + */ boolean hasQualifier(); + /** + * optional bytes qualifier = 1; + */ com.google.protobuf.ByteString getQualifier(); - + // optional bytes value = 2; + /** + * optional bytes value = 2; + */ boolean hasValue(); + /** + * optional bytes value = 2; + */ com.google.protobuf.ByteString getValue(); - + // optional uint64 timestamp = 3; + /** + * optional uint64 timestamp = 3; + */ boolean hasTimestamp(); + /** + * optional uint64 timestamp = 3; + */ long getTimestamp(); - + // optional .MutationProto.DeleteType deleteType = 4; + /** + * optional .MutationProto.DeleteType deleteType = 4; + */ boolean hasDeleteType(); + /** + * optional .MutationProto.DeleteType deleteType = 4; + */ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType getDeleteType(); } + /** + * Protobuf type {@code MutationProto.ColumnValue.QualifierValue} + */ public static final class QualifierValue extends com.google.protobuf.GeneratedMessage implements QualifierValueOrBuilder { // Use QualifierValue.newBuilder() to construct. - private QualifierValue(Builder builder) { + private QualifierValue(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private QualifierValue(boolean noInit) {} - + private QualifierValue(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final QualifierValue defaultInstance; public static QualifierValue getDefaultInstance() { return defaultInstance; } - + public QualifierValue getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private QualifierValue( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + qualifier_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + value_ = input.readBytes(); + break; + } + case 24: { + bitField0_ |= 0x00000004; + timestamp_ = input.readUInt64(); + break; + } + case 32: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType value = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(4, rawValue); + } else { + bitField0_ |= 0x00000008; + deleteType_ = value; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + 
makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_ColumnValue_QualifierValue_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_ColumnValue_QualifierValue_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_ColumnValue_QualifierValue_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public QualifierValue parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new QualifierValue(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // optional bytes qualifier = 1; public static final int QUALIFIER_FIELD_NUMBER = 1; private com.google.protobuf.ByteString qualifier_; + /** + * optional bytes qualifier = 1; + */ public boolean hasQualifier() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional bytes qualifier = 1; + */ public com.google.protobuf.ByteString getQualifier() { return qualifier_; } - + // optional bytes value = 2; public static final int VALUE_FIELD_NUMBER = 2; private com.google.protobuf.ByteString value_; + /** + * optional bytes value = 2; + */ public boolean hasValue() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional bytes value = 2; + */ public com.google.protobuf.ByteString getValue() { return value_; } - + // optional uint64 timestamp = 3; public static final int TIMESTAMP_FIELD_NUMBER = 3; private long timestamp_; + /** + * optional uint64 timestamp = 3; + */ public boolean hasTimestamp() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional uint64 timestamp = 3; + */ public long getTimestamp() { return timestamp_; } - + // optional .MutationProto.DeleteType deleteType = 4; public static final int DELETETYPE_FIELD_NUMBER = 4; private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType deleteType_; + /** + * optional .MutationProto.DeleteType deleteType = 4; + */ public boolean hasDeleteType() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * optional .MutationProto.DeleteType deleteType = 4; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType getDeleteType() { return deleteType_; } - + private void initFields() { qualifier_ = com.google.protobuf.ByteString.EMPTY; value_ = com.google.protobuf.ByteString.EMPTY; @@ -6732,11 +9356,11 @@ public final class ClientProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -6754,12 +9378,12 
@@ public final class ClientProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -6781,14 +9405,14 @@ public final class ClientProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -6798,7 +9422,7 @@ public final class ClientProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue) obj; - + boolean result = true; result = result && (hasQualifier() == other.hasQualifier()); if (hasQualifier()) { @@ -6824,9 +9448,13 @@ public final class ClientProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasQualifier()) { @@ -6846,89 +9474,79 @@ public final class ClientProtos { hash = (53 * hash) + hashEnum(getDeleteType()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code MutationProto.ColumnValue.QualifierValue} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder { @@ -6936,18 +9554,21 @@ public final class ClientProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_ColumnValue_QualifierValue_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_ColumnValue_QualifierValue_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_ColumnValue_QualifierValue_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private 
Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -6958,7 +9579,7 @@ public final class ClientProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); qualifier_ = com.google.protobuf.ByteString.EMPTY; @@ -6971,20 +9592,20 @@ public final class ClientProtos { bitField0_ = (bitField0_ & ~0x00000008); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_ColumnValue_QualifierValue_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue build() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue result = buildPartial(); if (!result.isInitialized()) { @@ -6992,17 +9613,7 @@ public final class ClientProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue(this); int from_bitField0_ = bitField0_; @@ -7027,7 +9638,7 @@ public final class ClientProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue)other); @@ -7036,7 +9647,7 @@ public final class ClientProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue other) { if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.getDefaultInstance()) return this; if (other.hasQualifier()) { @@ -7054,74 +9665,47 @@ public final class ClientProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - 
com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - qualifier_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - value_ = input.readBytes(); - break; - } - case 24: { - bitField0_ |= 0x00000004; - timestamp_ = input.readUInt64(); - break; - } - case 32: { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType value = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(4, rawValue); - } else { - bitField0_ |= 0x00000008; - deleteType_ = value; - } - break; - } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // optional bytes qualifier = 1; private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes qualifier = 1; + */ public boolean hasQualifier() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional bytes qualifier = 1; + */ public com.google.protobuf.ByteString getQualifier() { return qualifier_; } + /** + * optional bytes qualifier = 1; + */ public Builder setQualifier(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -7131,21 +9715,33 @@ public final class ClientProtos { onChanged(); return this; } + /** + * optional bytes qualifier = 1; + */ public Builder clearQualifier() { bitField0_ = (bitField0_ & ~0x00000001); qualifier_ = getDefaultInstance().getQualifier(); onChanged(); return this; } - + // optional bytes value = 2; private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes value = 2; + */ public boolean hasValue() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional bytes value = 2; + */ public com.google.protobuf.ByteString getValue() { return value_; } + /** + * optional bytes value = 2; + */ public Builder setValue(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -7155,42 +9751,66 @@ public final class ClientProtos { onChanged(); return this; } + /** + * optional bytes value = 2; + */ public Builder clearValue() { bitField0_ = (bitField0_ & ~0x00000002); value_ = getDefaultInstance().getValue(); onChanged(); return this; } - + // optional uint64 timestamp = 3; private long timestamp_ ; + /** + * optional uint64 timestamp = 3; + */ public boolean hasTimestamp() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional uint64 timestamp = 3; + */ public long getTimestamp() { return timestamp_; } + /** + * optional uint64 timestamp = 3; + */ public 
Builder setTimestamp(long value) { bitField0_ |= 0x00000004; timestamp_ = value; onChanged(); return this; } + /** + * optional uint64 timestamp = 3; + */ public Builder clearTimestamp() { bitField0_ = (bitField0_ & ~0x00000004); timestamp_ = 0L; onChanged(); return this; } - + // optional .MutationProto.DeleteType deleteType = 4; private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType deleteType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType.DELETE_ONE_VERSION; + /** + * optional .MutationProto.DeleteType deleteType = 4; + */ public boolean hasDeleteType() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * optional .MutationProto.DeleteType deleteType = 4; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType getDeleteType() { return deleteType_; } + /** + * optional .MutationProto.DeleteType deleteType = 4; + */ public Builder setDeleteType(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType value) { if (value == null) { throw new NullPointerException(); @@ -7200,56 +9820,80 @@ public final class ClientProtos { onChanged(); return this; } + /** + * optional .MutationProto.DeleteType deleteType = 4; + */ public Builder clearDeleteType() { bitField0_ = (bitField0_ & ~0x00000008); deleteType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType.DELETE_ONE_VERSION; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:MutationProto.ColumnValue.QualifierValue) } - + static { defaultInstance = new QualifierValue(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:MutationProto.ColumnValue.QualifierValue) } - + private int bitField0_; // required bytes family = 1; public static final int FAMILY_FIELD_NUMBER = 1; private com.google.protobuf.ByteString family_; + /** + * required bytes family = 1; + */ public boolean hasFamily() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes family = 1; + */ public com.google.protobuf.ByteString getFamily() { return family_; } - + // repeated .MutationProto.ColumnValue.QualifierValue qualifierValue = 2; public static final int QUALIFIERVALUE_FIELD_NUMBER = 2; private java.util.List qualifierValue_; + /** + * repeated .MutationProto.ColumnValue.QualifierValue qualifierValue = 2; + */ public java.util.List getQualifierValueList() { return qualifierValue_; } + /** + * repeated .MutationProto.ColumnValue.QualifierValue qualifierValue = 2; + */ public java.util.List getQualifierValueOrBuilderList() { return qualifierValue_; } + /** + * repeated .MutationProto.ColumnValue.QualifierValue qualifierValue = 2; + */ public int getQualifierValueCount() { return qualifierValue_.size(); } + /** + * repeated .MutationProto.ColumnValue.QualifierValue qualifierValue = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue getQualifierValue(int index) { return qualifierValue_.get(index); } + /** + * repeated .MutationProto.ColumnValue.QualifierValue qualifierValue = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder getQualifierValueOrBuilder( int index) { return qualifierValue_.get(index); } - + private void initFields() { family_ = com.google.protobuf.ByteString.EMPTY; qualifierValue_ = java.util.Collections.emptyList(); @@ -7258,7 +9902,7 @@ public final class ClientProtos { public final 
boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasFamily()) { memoizedIsInitialized = 0; return false; @@ -7266,7 +9910,7 @@ public final class ClientProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -7278,12 +9922,12 @@ public final class ClientProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -7297,14 +9941,14 @@ public final class ClientProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -7314,7 +9958,7 @@ public final class ClientProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue) obj; - + boolean result = true; result = result && (hasFamily() == other.hasFamily()); if (hasFamily()) { @@ -7327,9 +9971,13 @@ public final class ClientProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasFamily()) { @@ -7341,89 +9989,79 @@ public final class ClientProtos { hash = (53 * hash) + getQualifierValueList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom(java.io.InputStream input) throws 
java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code MutationProto.ColumnValue} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder { @@ -7431,18 +10069,21 @@ public final class ClientProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_ColumnValue_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_ColumnValue_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_ColumnValue_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder.class); } - + // Construct using 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -7454,7 +10095,7 @@ public final class ClientProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); family_ = com.google.protobuf.ByteString.EMPTY; @@ -7467,20 +10108,20 @@ public final class ClientProtos { } return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_ColumnValue_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue build() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue result = buildPartial(); if (!result.isInitialized()) { @@ -7488,17 +10129,7 @@ public final class ClientProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue(this); int from_bitField0_ = bitField0_; @@ -7520,7 +10151,7 @@ public final class ClientProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue)other); @@ -7529,7 +10160,7 @@ public final class ClientProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue other) { if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.getDefaultInstance()) return this; if (other.hasFamily()) { @@ -7564,7 +10195,7 @@ public final class ClientProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasFamily()) { @@ -7572,55 +10203,43 @@ public final class ClientProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - 
com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - family_ = input.readBytes(); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addQualifierValue(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required bytes family = 1; private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes family = 1; + */ public boolean hasFamily() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes family = 1; + */ public com.google.protobuf.ByteString getFamily() { return family_; } + /** + * required bytes family = 1; + */ public Builder setFamily(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -7630,13 +10249,16 @@ public final class ClientProtos { onChanged(); return this; } + /** + * required bytes family = 1; + */ public Builder clearFamily() { bitField0_ = (bitField0_ & ~0x00000001); family_ = getDefaultInstance().getFamily(); onChanged(); return this; } - + // repeated .MutationProto.ColumnValue.QualifierValue qualifierValue = 2; private java.util.List qualifierValue_ = java.util.Collections.emptyList(); @@ -7646,10 +10268,13 @@ public final class ClientProtos { bitField0_ |= 0x00000002; } } - + private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder> qualifierValueBuilder_; - + + /** + * repeated .MutationProto.ColumnValue.QualifierValue qualifierValue = 2; + */ public java.util.List getQualifierValueList() { if (qualifierValueBuilder_ == null) { return java.util.Collections.unmodifiableList(qualifierValue_); @@ -7657,6 +10282,9 @@ public final class ClientProtos { return qualifierValueBuilder_.getMessageList(); } } + /** + * repeated .MutationProto.ColumnValue.QualifierValue qualifierValue = 2; + */ public int getQualifierValueCount() { if (qualifierValueBuilder_ == null) { return qualifierValue_.size(); @@ -7664,6 +10292,9 @@ public final class ClientProtos { return qualifierValueBuilder_.getCount(); } } + /** + * repeated .MutationProto.ColumnValue.QualifierValue qualifierValue = 2; + */ public 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue getQualifierValue(int index) { if (qualifierValueBuilder_ == null) { return qualifierValue_.get(index); @@ -7671,6 +10302,9 @@ public final class ClientProtos { return qualifierValueBuilder_.getMessage(index); } } + /** + * repeated .MutationProto.ColumnValue.QualifierValue qualifierValue = 2; + */ public Builder setQualifierValue( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue value) { if (qualifierValueBuilder_ == null) { @@ -7685,6 +10319,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .MutationProto.ColumnValue.QualifierValue qualifierValue = 2; + */ public Builder setQualifierValue( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder builderForValue) { if (qualifierValueBuilder_ == null) { @@ -7696,6 +10333,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .MutationProto.ColumnValue.QualifierValue qualifierValue = 2; + */ public Builder addQualifierValue(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue value) { if (qualifierValueBuilder_ == null) { if (value == null) { @@ -7709,6 +10349,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .MutationProto.ColumnValue.QualifierValue qualifierValue = 2; + */ public Builder addQualifierValue( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue value) { if (qualifierValueBuilder_ == null) { @@ -7723,6 +10366,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .MutationProto.ColumnValue.QualifierValue qualifierValue = 2; + */ public Builder addQualifierValue( org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder builderForValue) { if (qualifierValueBuilder_ == null) { @@ -7734,6 +10380,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .MutationProto.ColumnValue.QualifierValue qualifierValue = 2; + */ public Builder addQualifierValue( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder builderForValue) { if (qualifierValueBuilder_ == null) { @@ -7745,6 +10394,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .MutationProto.ColumnValue.QualifierValue qualifierValue = 2; + */ public Builder addAllQualifierValue( java.lang.Iterable values) { if (qualifierValueBuilder_ == null) { @@ -7756,6 +10408,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .MutationProto.ColumnValue.QualifierValue qualifierValue = 2; + */ public Builder clearQualifierValue() { if (qualifierValueBuilder_ == null) { qualifierValue_ = java.util.Collections.emptyList(); @@ -7766,6 +10421,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .MutationProto.ColumnValue.QualifierValue qualifierValue = 2; + */ public Builder removeQualifierValue(int index) { if (qualifierValueBuilder_ == null) { ensureQualifierValueIsMutable(); @@ -7776,10 +10434,16 @@ public final class ClientProtos { } return this; } + /** + * repeated .MutationProto.ColumnValue.QualifierValue qualifierValue = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder getQualifierValueBuilder( int index) { return 
getQualifierValueFieldBuilder().getBuilder(index); } + /** + * repeated .MutationProto.ColumnValue.QualifierValue qualifierValue = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder getQualifierValueOrBuilder( int index) { if (qualifierValueBuilder_ == null) { @@ -7787,6 +10451,9 @@ public final class ClientProtos { return qualifierValueBuilder_.getMessageOrBuilder(index); } } + /** + * repeated .MutationProto.ColumnValue.QualifierValue qualifierValue = 2; + */ public java.util.List getQualifierValueOrBuilderList() { if (qualifierValueBuilder_ != null) { @@ -7795,15 +10462,24 @@ public final class ClientProtos { return java.util.Collections.unmodifiableList(qualifierValue_); } } + /** + * repeated .MutationProto.ColumnValue.QualifierValue qualifierValue = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder addQualifierValueBuilder() { return getQualifierValueFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.getDefaultInstance()); } + /** + * repeated .MutationProto.ColumnValue.QualifierValue qualifierValue = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder addQualifierValueBuilder( int index) { return getQualifierValueFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.getDefaultInstance()); } + /** + * repeated .MutationProto.ColumnValue.QualifierValue qualifierValue = 2; + */ public java.util.List getQualifierValueBuilderList() { return getQualifierValueFieldBuilder().getBuilderList(); @@ -7822,124 +10498,226 @@ public final class ClientProtos { } return qualifierValueBuilder_; } - + // @@protoc_insertion_point(builder_scope:MutationProto.ColumnValue) } - + static { defaultInstance = new ColumnValue(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:MutationProto.ColumnValue) } - + private int bitField0_; // optional bytes row = 1; public static final int ROW_FIELD_NUMBER = 1; private com.google.protobuf.ByteString row_; + /** + * optional bytes row = 1; + */ public boolean hasRow() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional bytes row = 1; + */ public com.google.protobuf.ByteString getRow() { return row_; } - + // optional .MutationProto.MutationType mutateType = 2; public static final int MUTATETYPE_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType mutateType_; + /** + * optional .MutationProto.MutationType mutateType = 2; + */ public boolean hasMutateType() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional .MutationProto.MutationType mutateType = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType getMutateType() { return mutateType_; } - + // repeated .MutationProto.ColumnValue columnValue = 3; public static final int COLUMNVALUE_FIELD_NUMBER = 3; private java.util.List columnValue_; + /** + * repeated .MutationProto.ColumnValue columnValue = 3; + */ public java.util.List getColumnValueList() { return columnValue_; } + /** + * repeated .MutationProto.ColumnValue columnValue = 3; + */ public java.util.List getColumnValueOrBuilderList() { return columnValue_; } + /** + * repeated .MutationProto.ColumnValue columnValue = 3; + */ public int 
getColumnValueCount() { return columnValue_.size(); } + /** + * repeated .MutationProto.ColumnValue columnValue = 3; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue getColumnValue(int index) { return columnValue_.get(index); } + /** + * repeated .MutationProto.ColumnValue columnValue = 3; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder getColumnValueOrBuilder( int index) { return columnValue_.get(index); } - + // optional uint64 timestamp = 4; public static final int TIMESTAMP_FIELD_NUMBER = 4; private long timestamp_; + /** + * optional uint64 timestamp = 4; + */ public boolean hasTimestamp() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional uint64 timestamp = 4; + */ public long getTimestamp() { return timestamp_; } - + // repeated .NameBytesPair attribute = 5; public static final int ATTRIBUTE_FIELD_NUMBER = 5; private java.util.List attribute_; + /** + * repeated .NameBytesPair attribute = 5; + */ public java.util.List getAttributeList() { return attribute_; } + /** + * repeated .NameBytesPair attribute = 5; + */ public java.util.List getAttributeOrBuilderList() { return attribute_; } + /** + * repeated .NameBytesPair attribute = 5; + */ public int getAttributeCount() { return attribute_.size(); } + /** + * repeated .NameBytesPair attribute = 5; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) { return attribute_.get(index); } + /** + * repeated .NameBytesPair attribute = 5; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( int index) { return attribute_.get(index); } - + // optional bool writeToWAL = 6 [default = true]; public static final int WRITETOWAL_FIELD_NUMBER = 6; private boolean writeToWAL_; + /** + * optional bool writeToWAL = 6 [default = true]; + */ public boolean hasWriteToWAL() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * optional bool writeToWAL = 6 [default = true]; + */ public boolean getWriteToWAL() { return writeToWAL_; } - + // optional .TimeRange timeRange = 7; public static final int TIMERANGE_FIELD_NUMBER = 7; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_; + /** + * optional .TimeRange timeRange = 7; + * + *
+     * For some mutations, a result may be returned, in which case,
+     * time range can be specified for potential performance gain
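+     *
+     * Illustrative sketch only (not generated output; 'row' and 'now' are
+     * hypothetical values, and ClientProtos/HBaseProtos imports are assumed):
+     *
+     *   MutationProto m = MutationProto.newBuilder()
+     *       .setRow(row)
+     *       .setMutateType(MutationProto.MutationType.INCREMENT)
+     *       .setTimeRange(HBaseProtos.TimeRange.newBuilder().setFrom(0L).setTo(now))
+     *       .build();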
+     * 
+ */ public boolean hasTimeRange() { return ((bitField0_ & 0x00000010) == 0x00000010); } + /** + * optional .TimeRange timeRange = 7; + * + *
+     * For some mutations, a result may be returned, in which case,
+     * time range can be specified for potential performance gain
+     * 
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { return timeRange_; } + /** + * optional .TimeRange timeRange = 7; + * + *
+     * For some mutations, a result may be returned, in which case,
+     * time range can be specified for potential performance gain
+     * 
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { return timeRange_; } - + // optional int32 associatedCellCount = 8; public static final int ASSOCIATEDCELLCOUNT_FIELD_NUMBER = 8; private int associatedCellCount_; + /** + * optional int32 associatedCellCount = 8; + * + *
+     * The below count is set when the associated cells are NOT
+     * part of this protobuf message; they are passed alongside
+     * and then this Message is a placeholder with metadata.  The
+     * count is needed to know how many to peel off the block of Cells as
+     * ours.  NOTE: This is different from the pb managed cellCount of the
+     * 'cell' field above which is non-null when the cells are pb'd.
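+     *
+     * Illustrative sender-side sketch only ('cellCount' is hypothetical): when the
+     * Cells ride in the side cell block, the message is just a placeholder:
+     *
+     *   MutationProto placeholder = MutationProto.newBuilder()
+     *       .setMutateType(MutationProto.MutationType.PUT)
+     *       .setAssociatedCellCount(cellCount)
+     *       .build();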
+     * 
+ */ public boolean hasAssociatedCellCount() { return ((bitField0_ & 0x00000020) == 0x00000020); } + /** + * optional int32 associatedCellCount = 8; + * + *
+     * The below count is set when the associated cells are NOT
+     * part of this protobuf message; they are passed alongside
+     * and then this Message is a placeholder with metadata.  The
+     * count is needed to know how many to peel off the block of Cells as
+     * ours.  NOTE: This is different from the pb managed cellCount of the
+     * 'cell' field above which is non-null when the cells are pb'd.
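+     *
+     * Illustrative receiver-side sketch only ('proto', 'cellScanner' and 'consume'
+     * are hypothetical): peel exactly this many Cells off the accompanying block:
+     *
+     *   int n = proto.hasAssociatedCellCount() ? proto.getAssociatedCellCount() : 0;
+     *   for (int i = 0; i < n; i++) {
+     *     cellScanner.advance();
+     *     consume(cellScanner.current());
+     *   }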
+     * 
+ */ public int getAssociatedCellCount() { return associatedCellCount_; } - + private void initFields() { row_ = com.google.protobuf.ByteString.EMPTY; mutateType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType.APPEND; @@ -7954,7 +10732,7 @@ public final class ClientProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + for (int i = 0; i < getColumnValueCount(); i++) { if (!getColumnValue(i).isInitialized()) { memoizedIsInitialized = 0; @@ -7970,7 +10748,7 @@ public final class ClientProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -8000,12 +10778,12 @@ public final class ClientProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -8043,14 +10821,14 @@ public final class ClientProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -8060,7 +10838,7 @@ public final class ClientProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto) obj; - + boolean result = true; result = result && (hasRow() == other.hasRow()); if (hasRow()) { @@ -8100,9 +10878,13 @@ public final class ClientProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasRow()) { @@ -8138,89 +10920,88 @@ public final class ClientProtos { hash = (53 * hash) + getAssociatedCellCount(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code MutationProto} + * + *
+     **
+     * A specific mutation inside a mutate request.
+     * It can be an append, increment, put or delete based
+     * on the mutation type.  It can be fully filled in or
+     * only metadata present because data is being carried
+     * elsewhere outside of pb.
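+     *
+     * Illustrative fully filled-in Put (not generated output; the ByteStrings
+     * 'row', 'family', 'qual' and 'val' are hypothetical):
+     *
+     *   MutationProto put = MutationProto.newBuilder()
+     *       .setRow(row)
+     *       .setMutateType(MutationProto.MutationType.PUT)
+     *       .addColumnValue(MutationProto.ColumnValue.newBuilder()
+     *           .setFamily(family)
+     *           .addQualifierValue(MutationProto.ColumnValue.QualifierValue.newBuilder()
+     *               .setQualifier(qual)
+     *               .setValue(val)))
+     *       .build();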
+     * 
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder { @@ -8228,18 +11009,21 @@ public final class ClientProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -8253,7 +11037,7 @@ public final class ClientProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); row_ = com.google.protobuf.ByteString.EMPTY; @@ -8286,20 +11070,20 @@ public final class ClientProtos { bitField0_ = (bitField0_ & ~0x00000080); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto build() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto result = buildPartial(); if (!result.isInitialized()) { @@ -8307,17 +11091,7 @@ public final class ClientProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto(this); int from_bitField0_ = bitField0_; @@ -8372,7 +11146,7 @@ public final class ClientProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto)other); @@ -8381,7 +11155,7 @@ public final class ClientProtos { return this; } } - + public 
Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto other) { if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance()) return this; if (other.hasRow()) { @@ -8457,7 +11231,7 @@ public final class ClientProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { for (int i = 0; i < getColumnValueCount(); i++) { if (!getColumnValue(i).isInitialized()) { @@ -8473,96 +11247,43 @@ public final class ClientProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - row_ = input.readBytes(); - break; - } - case 16: { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType value = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(2, rawValue); - } else { - bitField0_ |= 0x00000002; - mutateType_ = value; - } - break; - } - case 26: { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addColumnValue(subBuilder.buildPartial()); - break; - } - case 32: { - bitField0_ |= 0x00000008; - timestamp_ = input.readUInt64(); - break; - } - case 42: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addAttribute(subBuilder.buildPartial()); - break; - } - case 48: { - bitField0_ |= 0x00000020; - writeToWAL_ = input.readBool(); - break; - } - case 58: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(); - if (hasTimeRange()) { - subBuilder.mergeFrom(getTimeRange()); - } - input.readMessage(subBuilder, extensionRegistry); - setTimeRange(subBuilder.buildPartial()); - break; - } - case 64: { - bitField0_ |= 0x00000080; - associatedCellCount_ = input.readInt32(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // optional bytes row = 1; private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional 
bytes row = 1; + */ public boolean hasRow() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional bytes row = 1; + */ public com.google.protobuf.ByteString getRow() { return row_; } + /** + * optional bytes row = 1; + */ public Builder setRow(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -8572,21 +11293,33 @@ public final class ClientProtos { onChanged(); return this; } + /** + * optional bytes row = 1; + */ public Builder clearRow() { bitField0_ = (bitField0_ & ~0x00000001); row_ = getDefaultInstance().getRow(); onChanged(); return this; } - + // optional .MutationProto.MutationType mutateType = 2; private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType mutateType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType.APPEND; + /** + * optional .MutationProto.MutationType mutateType = 2; + */ public boolean hasMutateType() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional .MutationProto.MutationType mutateType = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType getMutateType() { return mutateType_; } + /** + * optional .MutationProto.MutationType mutateType = 2; + */ public Builder setMutateType(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType value) { if (value == null) { throw new NullPointerException(); @@ -8596,13 +11329,16 @@ public final class ClientProtos { onChanged(); return this; } + /** + * optional .MutationProto.MutationType mutateType = 2; + */ public Builder clearMutateType() { bitField0_ = (bitField0_ & ~0x00000002); mutateType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType.APPEND; onChanged(); return this; } - + // repeated .MutationProto.ColumnValue columnValue = 3; private java.util.List columnValue_ = java.util.Collections.emptyList(); @@ -8612,10 +11348,13 @@ public final class ClientProtos { bitField0_ |= 0x00000004; } } - + private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder> columnValueBuilder_; - + + /** + * repeated .MutationProto.ColumnValue columnValue = 3; + */ public java.util.List getColumnValueList() { if (columnValueBuilder_ == null) { return java.util.Collections.unmodifiableList(columnValue_); @@ -8623,6 +11362,9 @@ public final class ClientProtos { return columnValueBuilder_.getMessageList(); } } + /** + * repeated .MutationProto.ColumnValue columnValue = 3; + */ public int getColumnValueCount() { if (columnValueBuilder_ == null) { return columnValue_.size(); @@ -8630,6 +11372,9 @@ public final class ClientProtos { return columnValueBuilder_.getCount(); } } + /** + * repeated .MutationProto.ColumnValue columnValue = 3; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue getColumnValue(int index) { if (columnValueBuilder_ == null) { return columnValue_.get(index); @@ -8637,6 +11382,9 @@ public final class ClientProtos { return columnValueBuilder_.getMessage(index); } } + /** + * repeated .MutationProto.ColumnValue columnValue = 3; + */ public Builder setColumnValue( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue value) { if 
(columnValueBuilder_ == null) { @@ -8651,6 +11399,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .MutationProto.ColumnValue columnValue = 3; + */ public Builder setColumnValue( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder builderForValue) { if (columnValueBuilder_ == null) { @@ -8662,6 +11413,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .MutationProto.ColumnValue columnValue = 3; + */ public Builder addColumnValue(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue value) { if (columnValueBuilder_ == null) { if (value == null) { @@ -8675,6 +11429,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .MutationProto.ColumnValue columnValue = 3; + */ public Builder addColumnValue( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue value) { if (columnValueBuilder_ == null) { @@ -8689,6 +11446,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .MutationProto.ColumnValue columnValue = 3; + */ public Builder addColumnValue( org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder builderForValue) { if (columnValueBuilder_ == null) { @@ -8700,6 +11460,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .MutationProto.ColumnValue columnValue = 3; + */ public Builder addColumnValue( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder builderForValue) { if (columnValueBuilder_ == null) { @@ -8711,6 +11474,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .MutationProto.ColumnValue columnValue = 3; + */ public Builder addAllColumnValue( java.lang.Iterable values) { if (columnValueBuilder_ == null) { @@ -8722,6 +11488,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .MutationProto.ColumnValue columnValue = 3; + */ public Builder clearColumnValue() { if (columnValueBuilder_ == null) { columnValue_ = java.util.Collections.emptyList(); @@ -8732,6 +11501,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .MutationProto.ColumnValue columnValue = 3; + */ public Builder removeColumnValue(int index) { if (columnValueBuilder_ == null) { ensureColumnValueIsMutable(); @@ -8742,10 +11514,16 @@ public final class ClientProtos { } return this; } + /** + * repeated .MutationProto.ColumnValue columnValue = 3; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder getColumnValueBuilder( int index) { return getColumnValueFieldBuilder().getBuilder(index); } + /** + * repeated .MutationProto.ColumnValue columnValue = 3; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder getColumnValueOrBuilder( int index) { if (columnValueBuilder_ == null) { @@ -8753,6 +11531,9 @@ public final class ClientProtos { return columnValueBuilder_.getMessageOrBuilder(index); } } + /** + * repeated .MutationProto.ColumnValue columnValue = 3; + */ public java.util.List getColumnValueOrBuilderList() { if (columnValueBuilder_ != null) { @@ -8761,15 +11542,24 @@ public final class ClientProtos { return java.util.Collections.unmodifiableList(columnValue_); } } + /** + * repeated .MutationProto.ColumnValue columnValue = 3; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder addColumnValueBuilder() { 
return getColumnValueFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.getDefaultInstance()); } + /** + * repeated .MutationProto.ColumnValue columnValue = 3; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder addColumnValueBuilder( int index) { return getColumnValueFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.getDefaultInstance()); } + /** + * repeated .MutationProto.ColumnValue columnValue = 3; + */ public java.util.List getColumnValueBuilderList() { return getColumnValueFieldBuilder().getBuilderList(); @@ -8788,28 +11578,40 @@ public final class ClientProtos { } return columnValueBuilder_; } - + // optional uint64 timestamp = 4; private long timestamp_ ; + /** + * optional uint64 timestamp = 4; + */ public boolean hasTimestamp() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * optional uint64 timestamp = 4; + */ public long getTimestamp() { return timestamp_; } + /** + * optional uint64 timestamp = 4; + */ public Builder setTimestamp(long value) { bitField0_ |= 0x00000008; timestamp_ = value; onChanged(); return this; } + /** + * optional uint64 timestamp = 4; + */ public Builder clearTimestamp() { bitField0_ = (bitField0_ & ~0x00000008); timestamp_ = 0L; onChanged(); return this; } - + // repeated .NameBytesPair attribute = 5; private java.util.List attribute_ = java.util.Collections.emptyList(); @@ -8819,10 +11621,13 @@ public final class ClientProtos { bitField0_ |= 0x00000010; } } - + private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> attributeBuilder_; - + + /** + * repeated .NameBytesPair attribute = 5; + */ public java.util.List getAttributeList() { if (attributeBuilder_ == null) { return java.util.Collections.unmodifiableList(attribute_); @@ -8830,6 +11635,9 @@ public final class ClientProtos { return attributeBuilder_.getMessageList(); } } + /** + * repeated .NameBytesPair attribute = 5; + */ public int getAttributeCount() { if (attributeBuilder_ == null) { return attribute_.size(); @@ -8837,6 +11645,9 @@ public final class ClientProtos { return attributeBuilder_.getCount(); } } + /** + * repeated .NameBytesPair attribute = 5; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) { if (attributeBuilder_ == null) { return attribute_.get(index); @@ -8844,6 +11655,9 @@ public final class ClientProtos { return attributeBuilder_.getMessage(index); } } + /** + * repeated .NameBytesPair attribute = 5; + */ public Builder setAttribute( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { if (attributeBuilder_ == null) { @@ -8858,6 +11672,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .NameBytesPair attribute = 5; + */ public Builder setAttribute( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { if (attributeBuilder_ == null) { @@ -8869,6 +11686,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .NameBytesPair attribute = 5; + */ public Builder addAttribute(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { if (attributeBuilder_ == null) { if (value == 
null) { @@ -8882,6 +11702,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .NameBytesPair attribute = 5; + */ public Builder addAttribute( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { if (attributeBuilder_ == null) { @@ -8896,6 +11719,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .NameBytesPair attribute = 5; + */ public Builder addAttribute( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { if (attributeBuilder_ == null) { @@ -8907,6 +11733,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .NameBytesPair attribute = 5; + */ public Builder addAttribute( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { if (attributeBuilder_ == null) { @@ -8918,6 +11747,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .NameBytesPair attribute = 5; + */ public Builder addAllAttribute( java.lang.Iterable values) { if (attributeBuilder_ == null) { @@ -8929,6 +11761,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .NameBytesPair attribute = 5; + */ public Builder clearAttribute() { if (attributeBuilder_ == null) { attribute_ = java.util.Collections.emptyList(); @@ -8939,6 +11774,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .NameBytesPair attribute = 5; + */ public Builder removeAttribute(int index) { if (attributeBuilder_ == null) { ensureAttributeIsMutable(); @@ -8949,10 +11787,16 @@ public final class ClientProtos { } return this; } + /** + * repeated .NameBytesPair attribute = 5; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getAttributeBuilder( int index) { return getAttributeFieldBuilder().getBuilder(index); } + /** + * repeated .NameBytesPair attribute = 5; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( int index) { if (attributeBuilder_ == null) { @@ -8960,6 +11804,9 @@ public final class ClientProtos { return attributeBuilder_.getMessageOrBuilder(index); } } + /** + * repeated .NameBytesPair attribute = 5; + */ public java.util.List getAttributeOrBuilderList() { if (attributeBuilder_ != null) { @@ -8968,15 +11815,24 @@ public final class ClientProtos { return java.util.Collections.unmodifiableList(attribute_); } } + /** + * repeated .NameBytesPair attribute = 5; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder() { return getAttributeFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); } + /** + * repeated .NameBytesPair attribute = 5; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder( int index) { return getAttributeFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); } + /** + * repeated .NameBytesPair attribute = 5; + */ public java.util.List getAttributeBuilderList() { return getAttributeFieldBuilder().getBuilderList(); @@ -8995,35 +11851,63 @@ public final class ClientProtos { } return attributeBuilder_; } - + // optional bool writeToWAL = 6 [default = true]; private boolean writeToWAL_ = true; + /** + * optional bool writeToWAL = 6 [default = true]; + */ public boolean hasWriteToWAL() { return ((bitField0_ & 0x00000020) == 0x00000020); 
       }
+      /**
+       * optional bool writeToWAL = 6 [default = true];
+       */
       public boolean getWriteToWAL() {
         return writeToWAL_;
       }
+      /**
+       * optional bool writeToWAL = 6 [default = true];
+       */
       public Builder setWriteToWAL(boolean value) {
         bitField0_ |= 0x00000020;
         writeToWAL_ = value;
         onChanged();
         return this;
       }
+      /**
+       * optional bool writeToWAL = 6 [default = true];
+       */
       public Builder clearWriteToWAL() {
         bitField0_ = (bitField0_ & ~0x00000020);
         writeToWAL_ = true;
         onChanged();
         return this;
       }
-      
+
       // optional .TimeRange timeRange = 7;
       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
       private com.google.protobuf.SingleFieldBuilder<
           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> timeRangeBuilder_;
+      /**
+       * optional .TimeRange timeRange = 7;
+       *
+       *
+       * For some mutations, a result may be returned, in which case,
+       * time range can be specified for potential performance gain
+       * 
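The comment above is carried over from the .proto file: mutations such as APPEND return a Result, and the caller can bound the read that produces it. A minimal client-side sketch, assuming HBaseProtos.TimeRange exposes setFrom/setTo setters (they are not shown in this hunk):

    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto;
    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange;

    public class TimeRangeMutationSketch {
      // Builds an APPEND mutation whose result read is bounded to [from, to).
      // Restricting the range lets the server skip data outside it, which is
      // the "performance gain" the comment refers to.
      public static MutationProto appendWithTimeRange(byte[] row, long from, long to) {
        return MutationProto.newBuilder()
            .setRow(ByteString.copyFrom(row))
            .setMutateType(MutationProto.MutationType.APPEND)
            .setTimeRange(TimeRange.newBuilder()
                .setFrom(from) // assumed setter name on HBaseProtos.TimeRange
                .setTo(to)     // assumed setter name on HBaseProtos.TimeRange
                .build())
            .build();
      }
    }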
+       */
       public boolean hasTimeRange() {
         return ((bitField0_ & 0x00000040) == 0x00000040);
       }
+      /**
+       * optional .TimeRange timeRange = 7;
+       *
+       *
+       * For some mutations, a result may be returned, in which case,
+       * time range can be specified for potential performance gain
+       * 
+       */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() {
         if (timeRangeBuilder_ == null) {
           return timeRange_;
@@ -9031,6 +11915,14 @@ public final class ClientProtos {
           return timeRangeBuilder_.getMessage();
         }
       }
+      /**
+       * optional .TimeRange timeRange = 7;
+       *
+       *
+       * For some mutations, a result may be returned, in which case,
+       * time range can be specified for potential performance gain
+       * 
+       */
       public Builder setTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) {
         if (timeRangeBuilder_ == null) {
           if (value == null) {
@@ -9044,6 +11936,14 @@ public final class ClientProtos {
         bitField0_ |= 0x00000040;
         return this;
       }
+      /**
+       * optional .TimeRange timeRange = 7;
+       *
+       *
+       * For some mutations, a result may be returned, in which case,
+       * time range can be specified for potential performance gain
+       * 
+       */
       public Builder setTimeRange(
           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder builderForValue) {
         if (timeRangeBuilder_ == null) {
@@ -9055,6 +11955,14 @@ public final class ClientProtos {
         bitField0_ |= 0x00000040;
         return this;
       }
+      /**
+       * optional .TimeRange timeRange = 7;
+       *
+       *
+       * For some mutations, a result may be returned, in which case,
+       * time range can be specified for potential performance gain
+       * 
+       */
       public Builder mergeTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) {
         if (timeRangeBuilder_ == null) {
           if (((bitField0_ & 0x00000040) == 0x00000040) &&
@@ -9071,6 +11979,14 @@ public final class ClientProtos {
         bitField0_ |= 0x00000040;
         return this;
       }
+      /**
+       * optional .TimeRange timeRange = 7;
+       *
+       *
+       * For some mutations, a result may be returned, in which case,
+       * time range can be specified for potential performance gain
+       * 
+       */
       public Builder clearTimeRange() {
         if (timeRangeBuilder_ == null) {
           timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
@@ -9081,11 +11997,27 @@ public final class ClientProtos {
         bitField0_ = (bitField0_ & ~0x00000040);
         return this;
       }
+      /**
+       * optional .TimeRange timeRange = 7;
+       *
+       *
+       * For some mutations, a result may be returned, in which case,
+       * time range can be specified for potential performance gain
+       * 
+       */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder getTimeRangeBuilder() {
         bitField0_ |= 0x00000040;
         onChanged();
         return getTimeRangeFieldBuilder().getBuilder();
       }
+      /**
+       * optional .TimeRange timeRange = 7;
+       *
+       *
+       * For some mutations, a result may be returned, in which case,
+       * time range can be specified for potential performance gain
+       * 
+       */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() {
         if (timeRangeBuilder_ != null) {
           return timeRangeBuilder_.getMessageOrBuilder();
@@ -9093,6 +12025,14 @@ public final class ClientProtos {
           return timeRange_;
         }
       }
+      /**
+       * optional .TimeRange timeRange = 7;
+       *
+       *
+       * For some mutations, a result may be returned, in which case,
+       * time range can be specified for potential performance gain
+       * 
+       */
       private com.google.protobuf.SingleFieldBuilder<
           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder>
           getTimeRangeFieldBuilder() {
@@ -9106,125 +12046,339 @@ public final class ClientProtos {
         }
         return timeRangeBuilder_;
       }
-      
+
       // optional int32 associatedCellCount = 8;
       private int associatedCellCount_ ;
+      /**
+       * optional int32 associatedCellCount = 8;
+       *
+       *
+       * The below count is set when the associated cells are NOT
+       * part of this protobuf message; they are passed alongside
+       * and then this Message is a placeholder with metadata.  The
+       * count is needed to know how many to peel off the block of Cells as
+       * ours.  NOTE: This is different from the pb managed cellCount of the
+       * 'cell' field above which is non-null when the cells are pb'd.
+       * 
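This comment documents the cellblock path: the Cells themselves travel outside the protobuf payload, and the message only records how many of the alongside cells belong to it. A sketch of building such a placeholder, assuming MutationType also defines PUT (only APPEND is visible in this hunk):

    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto;

    public class CellCountSketch {
      // A metadata-only mutation: no ColumnValues are serialized here; the
      // receiver instead peels cellCount cells off the shared cellblock.
      public static MutationProto placeholder(byte[] row, int cellCount) {
        return MutationProto.newBuilder()
            .setRow(ByteString.copyFrom(row))
            .setMutateType(MutationProto.MutationType.PUT) // assumed enum value
            .setAssociatedCellCount(cellCount)
            .build();
      }
    }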
+       */
       public boolean hasAssociatedCellCount() {
         return ((bitField0_ & 0x00000080) == 0x00000080);
       }
+      /**
+       * optional int32 associatedCellCount = 8;
+       *
+       *
+       * The below count is set when the associated cells are NOT
+       * part of this protobuf message; they are passed alongside
+       * and then this Message is a placeholder with metadata.  The
+       * count is needed to know how many to peel off the block of Cells as
+       * ours.  NOTE: This is different from the pb managed cellCount of the
+       * 'cell' field above which is non-null when the cells are pb'd.
+       * 
+       */
       public int getAssociatedCellCount() {
         return associatedCellCount_;
       }
+      /**
+       * optional int32 associatedCellCount = 8;
+       *
+       *
+       * The below count is set when the associated cells are NOT
+       * part of this protobuf message; they are passed alongside
+       * and then this Message is a placeholder with metadata.  The
+       * count is needed to know how many to peel off the block of Cells as
+       * ours.  NOTE: This is different from the pb managed cellCount of the
+       * 'cell' field above which is non-null when the cells are pb'd.
+       * 
+       */
       public Builder setAssociatedCellCount(int value) {
         bitField0_ |= 0x00000080;
         associatedCellCount_ = value;
         onChanged();
         return this;
       }
+      /**
+       * optional int32 associatedCellCount = 8;
+       *
+       *
+       * The below count is set when the associated cells are NOT
+       * part of this protobuf message; they are passed alongside
+       * and then this Message is a placeholder with metadata.  The
+       * count is needed to know how many to peel off the block of Cells as
+       * ours.  NOTE: This is different from the pb managed cellCount of the
+       * 'cell' field above which is non-null when the cells are pb'd.
+       * 
+       */
       public Builder clearAssociatedCellCount() {
         bitField0_ = (bitField0_ & ~0x00000080);
         associatedCellCount_ = 0;
         onChanged();
         return this;
       }
-      
+
       // @@protoc_insertion_point(builder_scope:MutationProto)
     }
-    
+
     static {
       defaultInstance = new MutationProto(true);
       defaultInstance.initFields();
     }
-    
+
     // @@protoc_insertion_point(class_scope:MutationProto)
   }
-  
+
   public interface MutateRequestOrBuilder
       extends com.google.protobuf.MessageOrBuilder {
-    
+
     // required .RegionSpecifier region = 1;
+    /**
+     * required .RegionSpecifier region = 1;
+     */
    boolean hasRegion();
+    /**
+     * required .RegionSpecifier region = 1;
+     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
+    /**
+     * required .RegionSpecifier region = 1;
+     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();
-    
+
    // required .MutationProto mutation = 2;
+    /**
+     * required .MutationProto mutation = 2;
+     */
    boolean hasMutation();
+    /**
+     * required .MutationProto mutation = 2;
+     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutation();
+    /**
+     * required .MutationProto mutation = 2;
+     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationOrBuilder();
-    
+
    // optional .Condition condition = 3;
+    /**
+     * optional .Condition condition = 3;
+     */
    boolean hasCondition();
+    /**
+     * optional .Condition condition = 3;
+     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getCondition();
+    /**
+     * optional .Condition condition = 3;
+     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder();
  }
+  /**
+   * Protobuf type {@code MutateRequest}
+   *
+   *
+   **
+   * The mutate request. Perform a single Mutate operation.
+   *
+   * Optionally, you can specify a condition. The mutate
+   * will take place only if the condition is met.  Otherwise,
+   * the mutate will be ignored.  In the response result,
+   * parameter processed is used to indicate if the mutate
+   * actually happened.
+   * 
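The class comment spells out the contract: region and mutation are required, condition is optional, and the response's processed flag reports whether a guarded mutate actually ran. A minimal sketch using only the setters generated below; the Condition message's own fields are outside this hunk, so it is passed in ready-made:

    import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition;
    import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest;
    import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto;
    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier;

    public class MutateRequestSketch {
      // Assembles a conditional mutate; without setCondition the mutation
      // is applied unconditionally.
      public static MutateRequest checkedMutate(RegionSpecifier region,
          MutationProto mutation, Condition condition) {
        return MutateRequest.newBuilder()
            .setRegion(region)        // required
            .setMutation(mutation)    // required
            .setCondition(condition)  // optional guard
            .build();
      }
    }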
+ */ public static final class MutateRequest extends com.google.protobuf.GeneratedMessage implements MutateRequestOrBuilder { // Use MutateRequest.newBuilder() to construct. - private MutateRequest(Builder builder) { + private MutateRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private MutateRequest(boolean noInit) {} - + private MutateRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final MutateRequest defaultInstance; public static MutateRequest getDefaultInstance() { return defaultInstance; } - + public MutateRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private MutateRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = region_.toBuilder(); + } + region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(region_); + region_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder subBuilder = null; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + subBuilder = mutation_.toBuilder(); + } + mutation_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(mutation_); + mutation_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000002; + break; + } + case 26: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder subBuilder = null; + if (((bitField0_ & 0x00000004) == 0x00000004)) { + subBuilder = condition_.toBuilder(); + } + condition_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(condition_); + condition_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000004; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateRequest_descriptor; } - + protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public MutateRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new MutateRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required .RegionSpecifier region = 1; public static final int REGION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + /** + * required .RegionSpecifier region = 1; + */ public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { return region_; } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { return region_; } - + // required .MutationProto mutation = 2; public static final int MUTATION_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto mutation_; + /** + * required .MutationProto mutation = 2; + */ public boolean hasMutation() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required .MutationProto mutation = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutation() { return mutation_; } + /** + * required .MutationProto mutation = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationOrBuilder() { return mutation_; } - + // optional .Condition condition = 3; public static final int CONDITION_FIELD_NUMBER = 3; private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition condition_; + /** + * optional .Condition condition = 3; + */ public boolean hasCondition() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional .Condition condition = 3; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getCondition() { return condition_; } + /** + * optional .Condition condition = 3; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder() { return condition_; } - + private void initFields() { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); mutation_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance(); @@ -9234,7 +12388,7 @@ public final class ClientProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasRegion()) { memoizedIsInitialized = 0; return false; @@ -9260,7 +12414,7 @@ public final class 
ClientProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -9275,12 +12429,12 @@ public final class ClientProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -9298,14 +12452,14 @@ public final class ClientProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -9315,7 +12469,7 @@ public final class ClientProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest) obj; - + boolean result = true; result = result && (hasRegion() == other.hasRegion()); if (hasRegion()) { @@ -9336,9 +12490,13 @@ public final class ClientProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasRegion()) { @@ -9354,89 +12512,90 @@ public final class ClientProtos { hash = (53 * hash) + getCondition().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - 
return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code MutateRequest} + * + *
+     **
+     * The mutate request. Perform a single Mutate operation.
+     *
+     * Optionally, you can specify a condition. The mutate
+     * will take place only if the condition is met.  Otherwise,
+     * the mutate will be ignored.  In the response result,
+     * parameter processed is used to indicate if the mutate
+     * actually happened.
+     * 
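The parseFrom/parseDelimitedFrom overloads rewritten above are now one-line delegations to the static PARSER, and the delimited variants no longer go through a Builder at all. A round-trip sketch under that API; writeDelimitedTo is the standard protobuf MessageLite counterpart of parseDelimitedFrom:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest;

    public class DelimitedRoundTripSketch {
      // Writes the message length-prefixed, then reads it back the same way.
      public static MutateRequest roundTrip(MutateRequest request) throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        request.writeDelimitedTo(out);
        return MutateRequest.parseDelimitedFrom(
            new ByteArrayInputStream(out.toByteArray()));
      }
    }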
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequestOrBuilder { @@ -9444,18 +12603,21 @@ public final class ClientProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -9469,7 +12631,7 @@ public final class ClientProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (regionBuilder_ == null) { @@ -9492,20 +12654,20 @@ public final class ClientProtos { bitField0_ = (bitField0_ & ~0x00000004); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest build() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest result = buildPartial(); if (!result.isInitialized()) { @@ -9513,17 +12675,7 @@ public final class ClientProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest(this); int from_bitField0_ = bitField0_; @@ -9556,7 +12708,7 @@ public final class ClientProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest)other); @@ -9565,7 +12717,7 @@ public final class ClientProtos { return this; } } - + public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.getDefaultInstance()) return this; if (other.hasRegion()) { @@ -9580,7 +12732,7 @@ public final class ClientProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasRegion()) { @@ -9606,70 +12758,39 @@ public final class ClientProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegion(subBuilder.buildPartial()); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.newBuilder(); - if (hasMutation()) { - subBuilder.mergeFrom(getMutation()); - } - input.readMessage(subBuilder, extensionRegistry); - setMutation(subBuilder.buildPartial()); - break; - } - case 26: { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.newBuilder(); - if (hasCondition()) { - subBuilder.mergeFrom(getCondition()); - } - input.readMessage(subBuilder, extensionRegistry); - setCondition(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .RegionSpecifier region = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + /** + * required .RegionSpecifier region = 1; + */ public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { return region_; @@ 
-9677,6 +12798,9 @@ public final class ClientProtos { return regionBuilder_.getMessage(); } } + /** + * required .RegionSpecifier region = 1; + */ public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (value == null) { @@ -9690,6 +12814,9 @@ public final class ClientProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier region = 1; + */ public Builder setRegion( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { if (regionBuilder_ == null) { @@ -9701,6 +12828,9 @@ public final class ClientProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier region = 1; + */ public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -9717,6 +12847,9 @@ public final class ClientProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier region = 1; + */ public Builder clearRegion() { if (regionBuilder_ == null) { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); @@ -9727,11 +12860,17 @@ public final class ClientProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { bitField0_ |= 0x00000001; onChanged(); return getRegionFieldBuilder().getBuilder(); } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); @@ -9739,6 +12878,9 @@ public final class ClientProtos { return region_; } } + /** + * required .RegionSpecifier region = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { @@ -9752,14 +12894,20 @@ public final class ClientProtos { } return regionBuilder_; } - + // required .MutationProto mutation = 2; private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto mutation_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder> mutationBuilder_; + /** + * required .MutationProto mutation = 2; + */ public boolean hasMutation() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required .MutationProto mutation = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutation() { if (mutationBuilder_ == null) { return mutation_; @@ -9767,6 +12915,9 @@ public final class ClientProtos { return mutationBuilder_.getMessage(); } } + /** + * required .MutationProto mutation = 2; + */ public Builder setMutation(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto value) { if (mutationBuilder_ == null) { if (value == null) { @@ 
-9780,6 +12931,9 @@ public final class ClientProtos { bitField0_ |= 0x00000002; return this; } + /** + * required .MutationProto mutation = 2; + */ public Builder setMutation( org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder builderForValue) { if (mutationBuilder_ == null) { @@ -9791,6 +12945,9 @@ public final class ClientProtos { bitField0_ |= 0x00000002; return this; } + /** + * required .MutationProto mutation = 2; + */ public Builder mergeMutation(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto value) { if (mutationBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && @@ -9807,6 +12964,9 @@ public final class ClientProtos { bitField0_ |= 0x00000002; return this; } + /** + * required .MutationProto mutation = 2; + */ public Builder clearMutation() { if (mutationBuilder_ == null) { mutation_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance(); @@ -9817,11 +12977,17 @@ public final class ClientProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } + /** + * required .MutationProto mutation = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder getMutationBuilder() { bitField0_ |= 0x00000002; onChanged(); return getMutationFieldBuilder().getBuilder(); } + /** + * required .MutationProto mutation = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationOrBuilder() { if (mutationBuilder_ != null) { return mutationBuilder_.getMessageOrBuilder(); @@ -9829,6 +12995,9 @@ public final class ClientProtos { return mutation_; } } + /** + * required .MutationProto mutation = 2; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder> getMutationFieldBuilder() { @@ -9842,14 +13011,20 @@ public final class ClientProtos { } return mutationBuilder_; } - + // optional .Condition condition = 3; private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder> conditionBuilder_; + /** + * optional .Condition condition = 3; + */ public boolean hasCondition() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional .Condition condition = 3; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getCondition() { if (conditionBuilder_ == null) { return condition_; @@ -9857,6 +13032,9 @@ public final class ClientProtos { return conditionBuilder_.getMessage(); } } + /** + * optional .Condition condition = 3; + */ public Builder setCondition(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition value) { if (conditionBuilder_ == null) { if (value == null) { @@ -9870,6 +13048,9 @@ public final class ClientProtos { bitField0_ |= 0x00000004; return this; } + /** + * optional .Condition condition = 3; + */ public Builder setCondition( org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder builderForValue) { if (conditionBuilder_ == null) { @@ -9881,6 
+13062,9 @@ public final class ClientProtos { bitField0_ |= 0x00000004; return this; } + /** + * optional .Condition condition = 3; + */ public Builder mergeCondition(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition value) { if (conditionBuilder_ == null) { if (((bitField0_ & 0x00000004) == 0x00000004) && @@ -9897,6 +13081,9 @@ public final class ClientProtos { bitField0_ |= 0x00000004; return this; } + /** + * optional .Condition condition = 3; + */ public Builder clearCondition() { if (conditionBuilder_ == null) { condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance(); @@ -9907,11 +13094,17 @@ public final class ClientProtos { bitField0_ = (bitField0_ & ~0x00000004); return this; } + /** + * optional .Condition condition = 3; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder getConditionBuilder() { bitField0_ |= 0x00000004; onChanged(); return getConditionFieldBuilder().getBuilder(); } + /** + * optional .Condition condition = 3; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder() { if (conditionBuilder_ != null) { return conditionBuilder_.getMessageOrBuilder(); @@ -9919,6 +13112,9 @@ public final class ClientProtos { return condition_; } } + /** + * optional .Condition condition = 3; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder> getConditionFieldBuilder() { @@ -9932,82 +13128,208 @@ public final class ClientProtos { } return conditionBuilder_; } - + // @@protoc_insertion_point(builder_scope:MutateRequest) } - + static { defaultInstance = new MutateRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:MutateRequest) } - + public interface MutateResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // optional .Result result = 1; + /** + * optional .Result result = 1; + */ boolean hasResult(); + /** + * optional .Result result = 1; + */ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult(); + /** + * optional .Result result = 1; + */ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder(); - + // optional bool processed = 2; + /** + * optional bool processed = 2; + * + *
+     * used for mutate to indicate processed only
+     * 
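Since processed only means something when a Condition was attached to the request, a caller should gate on hasProcessed() before trusting it; a short sketch:

    import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse;

    public class ProcessedFlagSketch {
      // True only when the server explicitly reported the guarded mutate ran.
      public static boolean mutationApplied(MutateResponse response) {
        return response.hasProcessed() && response.getProcessed();
      }
    }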
+     */
    boolean hasProcessed();
+    /**
+     * optional bool processed = 2;
+     *
+     *
+     * used for mutate to indicate processed only
+     * 
+ */ boolean getProcessed(); } + /** + * Protobuf type {@code MutateResponse} + */ public static final class MutateResponse extends com.google.protobuf.GeneratedMessage implements MutateResponseOrBuilder { // Use MutateResponse.newBuilder() to construct. - private MutateResponse(Builder builder) { + private MutateResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private MutateResponse(boolean noInit) {} - + private MutateResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final MutateResponse defaultInstance; public static MutateResponse getDefaultInstance() { return defaultInstance; } - + public MutateResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private MutateResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = result_.toBuilder(); + } + result_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(result_); + result_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + case 16: { + bitField0_ |= 0x00000002; + processed_ = input.readBool(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.Builder.class); } - - private int bitField0_; + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public MutateResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) 
+ throws com.google.protobuf.InvalidProtocolBufferException { + return new MutateResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; // optional .Result result = 1; public static final int RESULT_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result_; + /** + * optional .Result result = 1; + */ public boolean hasResult() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional .Result result = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult() { return result_; } + /** + * optional .Result result = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() { return result_; } - + // optional bool processed = 2; public static final int PROCESSED_FIELD_NUMBER = 2; private boolean processed_; + /** + * optional bool processed = 2; + * + *
+     * used for mutate to indicate whether the mutation was processed
+     *
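+     *
+     * A minimal sketch of reading this flag on the client side; the
+     * response instance below is illustrative, not part of this file:
+     *
+     *   MutateResponse response = MutateResponse.newBuilder()
+     *       .setProcessed(true)
+     *       .build();
+     *   boolean applied = response.hasProcessed() && response.getProcessed();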
+     */
+    public boolean hasProcessed() {
+      return ((bitField0_ & 0x00000002) == 0x00000002);
+    }
+    /**
+     * optional bool processed = 2;
+     *
+     *
+     * used for mutate to indicate whether the mutation was processed
+     *
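+     *
+     * A round-trip sketch of the parser path this patch introduces,
+     * replacing the removed newBuilder().mergeFrom(data).buildParsed():
+     *
+     *   byte[] data = MutateResponse.getDefaultInstance().toByteArray();
+     *   MutateResponse reparsed = MutateResponse.PARSER.parseFrom(data);
+     *   // parseFrom throws InvalidProtocolBufferException on bad input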
+ */ public boolean getProcessed() { return processed_; } - + private void initFields() { result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); processed_ = false; @@ -10016,11 +13338,11 @@ public final class ClientProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -10032,12 +13354,12 @@ public final class ClientProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -10051,14 +13373,14 @@ public final class ClientProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -10068,7 +13390,7 @@ public final class ClientProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse) obj; - + boolean result = true; result = result && (hasResult() == other.hasResult()); if (hasResult()) { @@ -10084,9 +13406,13 @@ public final class ClientProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasResult()) { @@ -10098,89 +13424,79 @@ public final class ClientProtos { hash = (53 * hash) + hashBoolean(getProcessed()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code MutateResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponseOrBuilder { @@ -10188,18 +13504,21 @@ public final class ClientProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.Builder.class); } - + // Construct using 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -10211,7 +13530,7 @@ public final class ClientProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (resultBuilder_ == null) { @@ -10224,20 +13543,20 @@ public final class ClientProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse build() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse result = buildPartial(); if (!result.isInitialized()) { @@ -10245,17 +13564,7 @@ public final class ClientProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse(this); int from_bitField0_ = bitField0_; @@ -10276,7 +13585,7 @@ public final class ClientProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse)other); @@ -10285,7 +13594,7 @@ public final class ClientProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance()) return this; if (other.hasResult()) { @@ -10297,61 +13606,43 @@ public final class ClientProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, 
unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder(); - if (hasResult()) { - subBuilder.mergeFrom(getResult()); - } - input.readMessage(subBuilder, extensionRegistry); - setResult(subBuilder.buildPartial()); - break; - } - case 16: { - bitField0_ |= 0x00000002; - processed_ = input.readBool(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // optional .Result result = 1; private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> resultBuilder_; + /** + * optional .Result result = 1; + */ public boolean hasResult() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional .Result result = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult() { if (resultBuilder_ == null) { return result_; @@ -10359,6 +13650,9 @@ public final class ClientProtos { return resultBuilder_.getMessage(); } } + /** + * optional .Result result = 1; + */ public Builder setResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) { if (resultBuilder_ == null) { if (value == null) { @@ -10372,6 +13666,9 @@ public final class ClientProtos { bitField0_ |= 0x00000001; return this; } + /** + * optional .Result result = 1; + */ public Builder setResult( org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) { if (resultBuilder_ == null) { @@ -10383,6 +13680,9 @@ public final class ClientProtos { bitField0_ |= 0x00000001; return this; } + /** + * optional .Result result = 1; + */ public Builder mergeResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) { if (resultBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -10399,6 +13699,9 @@ public final class ClientProtos { bitField0_ |= 0x00000001; return this; } + /** + * optional .Result result = 1; + */ public Builder clearResult() { if (resultBuilder_ == null) { result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); @@ -10409,11 +13712,17 @@ public final class ClientProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * optional .Result result = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder getResultBuilder() { bitField0_ |= 0x00000001; onChanged(); return getResultFieldBuilder().getBuilder(); } + /** + * optional .Result result = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() { if (resultBuilder_ != null) { 
return resultBuilder_.getMessageOrBuilder(); @@ -10421,6 +13730,9 @@ public final class ClientProtos { return result_; } } + /** + * optional .Result result = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> getResultFieldBuilder() { @@ -10434,295 +13746,709 @@ public final class ClientProtos { } return resultBuilder_; } - + // optional bool processed = 2; private boolean processed_ ; + /** + * optional bool processed = 2; + * + *
+       * used for mutate to indicate whether the mutation was processed
+       *
+       */
+      public boolean hasProcessed() {
+        return ((bitField0_ & 0x00000002) == 0x00000002);
+      }
+      /**
+       * optional bool processed = 2;
+       *
+       *
+       * used for mutate to indicate whether the mutation was processed
+       *
+       */
+      public boolean getProcessed() {
+        return processed_;
+      }
+      /**
+       * optional bool processed = 2;
+       *
+       *
+       * used for mutate to indicate whether the mutation was processed
+       *
+       */
+      public Builder setProcessed(boolean value) {
+        bitField0_ |= 0x00000002;
+        processed_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * optional bool processed = 2;
+       *
+       *
+       * used for mutate to indicate whether the mutation was processed
+       *
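+       *
+       * A small builder sketch (illustrative): setting the flag and then
+       * clearing it returns the field to the unset state:
+       *
+       *   MutateResponse.Builder b = MutateResponse.newBuilder().setProcessed(true);
+       *   b.clearProcessed();
+       *   assert !b.hasProcessed();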
+ */ public Builder clearProcessed() { bitField0_ = (bitField0_ & ~0x00000002); processed_ = false; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:MutateResponse) } - + static { defaultInstance = new MutateResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:MutateResponse) } - + public interface ScanOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // repeated .Column column = 1; + /** + * repeated .Column column = 1; + */ java.util.List getColumnList(); + /** + * repeated .Column column = 1; + */ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index); + /** + * repeated .Column column = 1; + */ int getColumnCount(); + /** + * repeated .Column column = 1; + */ java.util.List getColumnOrBuilderList(); + /** + * repeated .Column column = 1; + */ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder( int index); - + // repeated .NameBytesPair attribute = 2; + /** + * repeated .NameBytesPair attribute = 2; + */ java.util.List getAttributeList(); + /** + * repeated .NameBytesPair attribute = 2; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index); + /** + * repeated .NameBytesPair attribute = 2; + */ int getAttributeCount(); + /** + * repeated .NameBytesPair attribute = 2; + */ java.util.List getAttributeOrBuilderList(); + /** + * repeated .NameBytesPair attribute = 2; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( int index); - + // optional bytes startRow = 3; + /** + * optional bytes startRow = 3; + */ boolean hasStartRow(); + /** + * optional bytes startRow = 3; + */ com.google.protobuf.ByteString getStartRow(); - + // optional bytes stopRow = 4; + /** + * optional bytes stopRow = 4; + */ boolean hasStopRow(); + /** + * optional bytes stopRow = 4; + */ com.google.protobuf.ByteString getStopRow(); - + // optional .Filter filter = 5; + /** + * optional .Filter filter = 5; + */ boolean hasFilter(); + /** + * optional .Filter filter = 5; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilter(); + /** + * optional .Filter filter = 5; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFilterOrBuilder(); - + // optional .TimeRange timeRange = 6; + /** + * optional .TimeRange timeRange = 6; + */ boolean hasTimeRange(); + /** + * optional .TimeRange timeRange = 6; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange(); + /** + * optional .TimeRange timeRange = 6; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder(); - + // optional uint32 maxVersions = 7 [default = 1]; + /** + * optional uint32 maxVersions = 7 [default = 1]; + */ boolean hasMaxVersions(); + /** + * optional uint32 maxVersions = 7 [default = 1]; + */ int getMaxVersions(); - + // optional bool cacheBlocks = 8 [default = true]; + /** + * optional bool cacheBlocks = 8 [default = true]; + */ boolean hasCacheBlocks(); + /** + * optional bool cacheBlocks = 8 [default = true]; + */ boolean getCacheBlocks(); - + // optional uint32 batchSize = 9; + /** + * optional uint32 batchSize = 9; + */ boolean hasBatchSize(); + /** + * optional uint32 batchSize = 9; + */ int getBatchSize(); - + // optional uint64 maxResultSize = 10; + /** + * optional uint64 maxResultSize = 10; + */ boolean hasMaxResultSize(); + /** + * optional uint64 maxResultSize = 10; + */ long 
getMaxResultSize(); - + // optional uint32 storeLimit = 11; + /** + * optional uint32 storeLimit = 11; + */ boolean hasStoreLimit(); + /** + * optional uint32 storeLimit = 11; + */ int getStoreLimit(); - + // optional uint32 storeOffset = 12; + /** + * optional uint32 storeOffset = 12; + */ boolean hasStoreOffset(); + /** + * optional uint32 storeOffset = 12; + */ int getStoreOffset(); - + // optional bool loadColumnFamiliesOnDemand = 13; + /** + * optional bool loadColumnFamiliesOnDemand = 13; + * + *
+     * DO NOT add defaults to loadColumnFamiliesOnDemand. 
+     * 
+     */
+    boolean hasLoadColumnFamiliesOnDemand();
+    /**
+     * optional bool loadColumnFamiliesOnDemand = 13;
+     *
+     *
+     * DO NOT add defaults to loadColumnFamiliesOnDemand. 
+     * 
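+     *
+     * Consumers typically treat this field as tri-state, using the has
+     * accessor to tell "unset" apart from an explicit false; scan below
+     * stands for any ScanOrBuilder instance:
+     *
+     *   Boolean onDemand = scan.hasLoadColumnFamiliesOnDemand()
+     *       ? scan.getLoadColumnFamiliesOnDemand() : null;  // null means unset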
+     */
+    boolean getLoadColumnFamiliesOnDemand();
+  }
+  /**
+   * Protobuf type {@code Scan}
+   *
+   *
+   **
+   * Instead of issuing a get against a table, you can scan it with optional
+   * filters. You can specify the row key range, the time range, the
+   * columns/families to scan, and so on.
+   *
+   * This scan is sent only with the first request of a scan session. The
+   * response to that initial request returns a scanner id, which should be
+   * used to fetch the remaining result batches until the scanner is closed.
+   * 
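+   *
+   * A construction sketch; the row keys and limits below are illustrative:
+   *
+   *   Scan scan = Scan.newBuilder()
+   *       .setStartRow(ByteString.copyFromUtf8("row-0000"))
+   *       .setStopRow(ByteString.copyFromUtf8("row-9999"))
+   *       .setMaxVersions(1)
+   *       .setCacheBlocks(true)
+   *       .build();
+   *
+   * The first scan request carries this message; subsequent requests reuse
+   * the scanner id returned by the initial response.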
+ */ public static final class Scan extends com.google.protobuf.GeneratedMessage implements ScanOrBuilder { // Use Scan.newBuilder() to construct. - private Scan(Builder builder) { + private Scan(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private Scan(boolean noInit) {} - + private Scan(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final Scan defaultInstance; public static Scan getDefaultInstance() { return defaultInstance; } - + public Scan getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private Scan( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + column_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + column_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.PARSER, extensionRegistry)); + break; + } + case 18: { + if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + attribute_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000002; + } + attribute_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry)); + break; + } + case 26: { + bitField0_ |= 0x00000001; + startRow_ = input.readBytes(); + break; + } + case 34: { + bitField0_ |= 0x00000002; + stopRow_ = input.readBytes(); + break; + } + case 42: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder subBuilder = null; + if (((bitField0_ & 0x00000004) == 0x00000004)) { + subBuilder = filter_.toBuilder(); + } + filter_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(filter_); + filter_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000004; + break; + } + case 50: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder subBuilder = null; + if (((bitField0_ & 0x00000008) == 0x00000008)) { + subBuilder = timeRange_.toBuilder(); + } + timeRange_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(timeRange_); + timeRange_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000008; + break; + } + case 56: { + bitField0_ |= 0x00000010; + maxVersions_ = input.readUInt32(); + break; + } + case 64: { + bitField0_ |= 0x00000020; + cacheBlocks_ = input.readBool(); + break; + } + case 72: { + bitField0_ |= 0x00000040; + batchSize_ = input.readUInt32(); + break; + } + case 80: { + bitField0_ |= 0x00000080; + maxResultSize_ = input.readUInt64(); + break; + } + case 88: { + bitField0_ |= 
0x00000100; + storeLimit_ = input.readUInt32(); + break; + } + case 96: { + bitField0_ |= 0x00000200; + storeOffset_ = input.readUInt32(); + break; + } + case 104: { + bitField0_ |= 0x00000400; + loadColumnFamiliesOnDemand_ = input.readBool(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + column_ = java.util.Collections.unmodifiableList(column_); + } + if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + attribute_ = java.util.Collections.unmodifiableList(attribute_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Scan_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Scan_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Scan_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public Scan parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new Scan(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // repeated .Column column = 1; public static final int COLUMN_FIELD_NUMBER = 1; private java.util.List column_; + /** + * repeated .Column column = 1; + */ public java.util.List getColumnList() { return column_; } + /** + * repeated .Column column = 1; + */ public java.util.List getColumnOrBuilderList() { return column_; } + /** + * repeated .Column column = 1; + */ public int getColumnCount() { return column_.size(); } + /** + * repeated .Column column = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index) { return column_.get(index); } + /** + * repeated .Column column = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder( int index) { return column_.get(index); } - + // repeated .NameBytesPair attribute = 2; public static final int ATTRIBUTE_FIELD_NUMBER = 2; private java.util.List attribute_; + /** + * repeated .NameBytesPair attribute = 2; + */ public java.util.List getAttributeList() { return attribute_; } + /** + * repeated .NameBytesPair attribute = 2; + */ public java.util.List getAttributeOrBuilderList() { return attribute_; } + /** + * repeated .NameBytesPair attribute = 2; + */ public int getAttributeCount() { return attribute_.size(); } + /** + * repeated .NameBytesPair attribute = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) { return attribute_.get(index); } + /** + * repeated .NameBytesPair attribute = 2; + */ public 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( int index) { return attribute_.get(index); } - + // optional bytes startRow = 3; public static final int STARTROW_FIELD_NUMBER = 3; private com.google.protobuf.ByteString startRow_; + /** + * optional bytes startRow = 3; + */ public boolean hasStartRow() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional bytes startRow = 3; + */ public com.google.protobuf.ByteString getStartRow() { return startRow_; } - + // optional bytes stopRow = 4; public static final int STOPROW_FIELD_NUMBER = 4; private com.google.protobuf.ByteString stopRow_; + /** + * optional bytes stopRow = 4; + */ public boolean hasStopRow() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional bytes stopRow = 4; + */ public com.google.protobuf.ByteString getStopRow() { return stopRow_; } - + // optional .Filter filter = 5; public static final int FILTER_FIELD_NUMBER = 5; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter filter_; + /** + * optional .Filter filter = 5; + */ public boolean hasFilter() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional .Filter filter = 5; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilter() { return filter_; } + /** + * optional .Filter filter = 5; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFilterOrBuilder() { return filter_; } - + // optional .TimeRange timeRange = 6; public static final int TIMERANGE_FIELD_NUMBER = 6; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_; + /** + * optional .TimeRange timeRange = 6; + */ public boolean hasTimeRange() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * optional .TimeRange timeRange = 6; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { return timeRange_; } + /** + * optional .TimeRange timeRange = 6; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { return timeRange_; } - + // optional uint32 maxVersions = 7 [default = 1]; public static final int MAXVERSIONS_FIELD_NUMBER = 7; private int maxVersions_; + /** + * optional uint32 maxVersions = 7 [default = 1]; + */ public boolean hasMaxVersions() { return ((bitField0_ & 0x00000010) == 0x00000010); } + /** + * optional uint32 maxVersions = 7 [default = 1]; + */ public int getMaxVersions() { return maxVersions_; } - + // optional bool cacheBlocks = 8 [default = true]; public static final int CACHEBLOCKS_FIELD_NUMBER = 8; private boolean cacheBlocks_; + /** + * optional bool cacheBlocks = 8 [default = true]; + */ public boolean hasCacheBlocks() { return ((bitField0_ & 0x00000020) == 0x00000020); } + /** + * optional bool cacheBlocks = 8 [default = true]; + */ public boolean getCacheBlocks() { return cacheBlocks_; } - + // optional uint32 batchSize = 9; public static final int BATCHSIZE_FIELD_NUMBER = 9; private int batchSize_; + /** + * optional uint32 batchSize = 9; + */ public boolean hasBatchSize() { return ((bitField0_ & 0x00000040) == 0x00000040); } + /** + * optional uint32 batchSize = 9; + */ public int getBatchSize() { return batchSize_; } - + // optional uint64 maxResultSize = 10; public static final int MAXRESULTSIZE_FIELD_NUMBER = 10; private long maxResultSize_; + /** + * optional uint64 maxResultSize = 10; + */ public boolean hasMaxResultSize() { return ((bitField0_ & 0x00000080) == 
0x00000080); } + /** + * optional uint64 maxResultSize = 10; + */ public long getMaxResultSize() { return maxResultSize_; } - + // optional uint32 storeLimit = 11; public static final int STORELIMIT_FIELD_NUMBER = 11; private int storeLimit_; + /** + * optional uint32 storeLimit = 11; + */ public boolean hasStoreLimit() { return ((bitField0_ & 0x00000100) == 0x00000100); } + /** + * optional uint32 storeLimit = 11; + */ public int getStoreLimit() { return storeLimit_; } - + // optional uint32 storeOffset = 12; public static final int STOREOFFSET_FIELD_NUMBER = 12; private int storeOffset_; + /** + * optional uint32 storeOffset = 12; + */ public boolean hasStoreOffset() { return ((bitField0_ & 0x00000200) == 0x00000200); } + /** + * optional uint32 storeOffset = 12; + */ public int getStoreOffset() { return storeOffset_; } - + // optional bool loadColumnFamiliesOnDemand = 13; public static final int LOADCOLUMNFAMILIESONDEMAND_FIELD_NUMBER = 13; private boolean loadColumnFamiliesOnDemand_; + /** + * optional bool loadColumnFamiliesOnDemand = 13; + * + *
+     * DO NOT add defaults to loadColumnFamiliesOnDemand. 
+     * 
+     */
+    public boolean hasLoadColumnFamiliesOnDemand() {
+      return ((bitField0_ & 0x00000400) == 0x00000400);
+    }
+    /**
+     * optional bool loadColumnFamiliesOnDemand = 13;
+     *
+     *
+     * DO NOT add defaults to loadColumnFamiliesOnDemand. 
+     * 
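+     *
+     * Messages parsed by this class now retain unknown fields, so bytes
+     * written by a newer schema survive a re-serialization round trip
+     * (bytesFromNewerClient below is a placeholder):
+     *
+     *   Scan parsed = Scan.PARSER.parseFrom(bytesFromNewerClient);
+     *   byte[] echoed = parsed.toByteArray();  // unknown fields preserved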
+ */ public boolean getLoadColumnFamiliesOnDemand() { return loadColumnFamiliesOnDemand_; } - + private void initFields() { column_ = java.util.Collections.emptyList(); attribute_ = java.util.Collections.emptyList(); @@ -10742,7 +14468,7 @@ public final class ClientProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + for (int i = 0; i < getColumnCount(); i++) { if (!getColumn(i).isInitialized()) { memoizedIsInitialized = 0; @@ -10764,7 +14490,7 @@ public final class ClientProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -10809,12 +14535,12 @@ public final class ClientProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; for (int i = 0; i < column_.size(); i++) { size += com.google.protobuf.CodedOutputStream @@ -10872,14 +14598,14 @@ public final class ClientProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -10889,7 +14615,7 @@ public final class ClientProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan) obj; - + boolean result = true; result = result && getColumnList() .equals(other.getColumnList()); @@ -10954,9 +14680,13 @@ public final class ClientProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (getColumnCount() > 0) { @@ -11012,89 +14742,90 @@ public final class ClientProtos { hash = (53 * hash) + hashBoolean(getLoadColumnFamiliesOnDemand()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, 
extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code Scan} + * + *
+     **
+     * Instead of issuing a get against a table, you can scan it with optional
+     * filters. You can specify the row key range, the time range, the
+     * columns/families to scan, and so on.
+     *
+     * This scan is sent only with the first request of a scan session. The
+     * response to that initial request returns a scanner id, which should be
+     * used to fetch the remaining result batches until the scanner is closed.
+     * 
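+     *
+     * A modification sketch via toBuilder(); the starting scan instance is
+     * illustrative:
+     *
+     *   Scan widened = scan.toBuilder()
+     *       .clearStopRow()
+     *       .setMaxVersions(3)
+     *       .build();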
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder { @@ -11102,18 +14833,21 @@ public final class ClientProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Scan_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Scan_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Scan_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -11128,7 +14862,7 @@ public final class ClientProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (columnBuilder_ == null) { @@ -11175,20 +14909,20 @@ public final class ClientProtos { bitField0_ = (bitField0_ & ~0x00001000); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Scan_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan build() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan result = buildPartial(); if (!result.isInitialized()) { @@ -11196,17 +14930,7 @@ public final class ClientProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan(this); int from_bitField0_ = bitField0_; @@ -11285,7 +15009,7 @@ public final class ClientProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan)other); @@ -11294,7 +15018,7 @@ public final class ClientProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan other) { if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance()) return 
this; if (columnBuilder_ == null) { @@ -11385,7 +15109,7 @@ public final class ClientProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { for (int i = 0; i < getColumnCount(); i++) { if (!getColumn(i).isInitialized()) { @@ -11407,111 +15131,26 @@ public final class ClientProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addColumn(subBuilder.buildPartial()); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addAttribute(subBuilder.buildPartial()); - break; - } - case 26: { - bitField0_ |= 0x00000004; - startRow_ = input.readBytes(); - break; - } - case 34: { - bitField0_ |= 0x00000008; - stopRow_ = input.readBytes(); - break; - } - case 42: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.newBuilder(); - if (hasFilter()) { - subBuilder.mergeFrom(getFilter()); - } - input.readMessage(subBuilder, extensionRegistry); - setFilter(subBuilder.buildPartial()); - break; - } - case 50: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(); - if (hasTimeRange()) { - subBuilder.mergeFrom(getTimeRange()); - } - input.readMessage(subBuilder, extensionRegistry); - setTimeRange(subBuilder.buildPartial()); - break; - } - case 56: { - bitField0_ |= 0x00000040; - maxVersions_ = input.readUInt32(); - break; - } - case 64: { - bitField0_ |= 0x00000080; - cacheBlocks_ = input.readBool(); - break; - } - case 72: { - bitField0_ |= 0x00000100; - batchSize_ = input.readUInt32(); - break; - } - case 80: { - bitField0_ |= 0x00000200; - maxResultSize_ = input.readUInt64(); - break; - } - case 88: { - bitField0_ |= 0x00000400; - storeLimit_ = input.readUInt32(); - break; - } - case 96: { - bitField0_ |= 0x00000800; - storeOffset_ = input.readUInt32(); - break; - } - case 104: { - bitField0_ |= 0x00001000; - loadColumnFamiliesOnDemand_ = input.readBool(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int 
bitField0_; - + // repeated .Column column = 1; private java.util.List column_ = java.util.Collections.emptyList(); @@ -11521,10 +15160,13 @@ public final class ClientProtos { bitField0_ |= 0x00000001; } } - + private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> columnBuilder_; - + + /** + * repeated .Column column = 1; + */ public java.util.List getColumnList() { if (columnBuilder_ == null) { return java.util.Collections.unmodifiableList(column_); @@ -11532,6 +15174,9 @@ public final class ClientProtos { return columnBuilder_.getMessageList(); } } + /** + * repeated .Column column = 1; + */ public int getColumnCount() { if (columnBuilder_ == null) { return column_.size(); @@ -11539,6 +15184,9 @@ public final class ClientProtos { return columnBuilder_.getCount(); } } + /** + * repeated .Column column = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index) { if (columnBuilder_ == null) { return column_.get(index); @@ -11546,6 +15194,9 @@ public final class ClientProtos { return columnBuilder_.getMessage(index); } } + /** + * repeated .Column column = 1; + */ public Builder setColumn( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) { if (columnBuilder_ == null) { @@ -11560,6 +15211,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .Column column = 1; + */ public Builder setColumn( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) { if (columnBuilder_ == null) { @@ -11571,6 +15225,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .Column column = 1; + */ public Builder addColumn(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) { if (columnBuilder_ == null) { if (value == null) { @@ -11584,6 +15241,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .Column column = 1; + */ public Builder addColumn( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) { if (columnBuilder_ == null) { @@ -11598,6 +15258,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .Column column = 1; + */ public Builder addColumn( org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) { if (columnBuilder_ == null) { @@ -11609,6 +15272,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .Column column = 1; + */ public Builder addColumn( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) { if (columnBuilder_ == null) { @@ -11620,6 +15286,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .Column column = 1; + */ public Builder addAllColumn( java.lang.Iterable values) { if (columnBuilder_ == null) { @@ -11631,6 +15300,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .Column column = 1; + */ public Builder clearColumn() { if (columnBuilder_ == null) { column_ = java.util.Collections.emptyList(); @@ -11641,6 +15313,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .Column column = 1; + */ public Builder removeColumn(int index) { if (columnBuilder_ == null) { ensureColumnIsMutable(); @@ -11651,10 +15326,16 @@ public final class ClientProtos { } return this; } + /** + * repeated 
.Column column = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder getColumnBuilder( int index) { return getColumnFieldBuilder().getBuilder(index); } + /** + * repeated .Column column = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder( int index) { if (columnBuilder_ == null) { @@ -11662,6 +15343,9 @@ public final class ClientProtos { return columnBuilder_.getMessageOrBuilder(index); } } + /** + * repeated .Column column = 1; + */ public java.util.List getColumnOrBuilderList() { if (columnBuilder_ != null) { @@ -11670,15 +15354,24 @@ public final class ClientProtos { return java.util.Collections.unmodifiableList(column_); } } + /** + * repeated .Column column = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder addColumnBuilder() { return getColumnFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance()); } + /** + * repeated .Column column = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder addColumnBuilder( int index) { return getColumnFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance()); } + /** + * repeated .Column column = 1; + */ public java.util.List getColumnBuilderList() { return getColumnFieldBuilder().getBuilderList(); @@ -11697,7 +15390,7 @@ public final class ClientProtos { } return columnBuilder_; } - + // repeated .NameBytesPair attribute = 2; private java.util.List attribute_ = java.util.Collections.emptyList(); @@ -11707,10 +15400,13 @@ public final class ClientProtos { bitField0_ |= 0x00000002; } } - + private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> attributeBuilder_; - + + /** + * repeated .NameBytesPair attribute = 2; + */ public java.util.List getAttributeList() { if (attributeBuilder_ == null) { return java.util.Collections.unmodifiableList(attribute_); @@ -11718,6 +15414,9 @@ public final class ClientProtos { return attributeBuilder_.getMessageList(); } } + /** + * repeated .NameBytesPair attribute = 2; + */ public int getAttributeCount() { if (attributeBuilder_ == null) { return attribute_.size(); @@ -11725,6 +15424,9 @@ public final class ClientProtos { return attributeBuilder_.getCount(); } } + /** + * repeated .NameBytesPair attribute = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) { if (attributeBuilder_ == null) { return attribute_.get(index); @@ -11732,6 +15434,9 @@ public final class ClientProtos { return attributeBuilder_.getMessage(index); } } + /** + * repeated .NameBytesPair attribute = 2; + */ public Builder setAttribute( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { if (attributeBuilder_ == null) { @@ -11746,6 +15451,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .NameBytesPair attribute = 2; + */ public Builder setAttribute( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { if (attributeBuilder_ == null) { @@ -11757,6 +15465,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .NameBytesPair attribute = 2; + */ public Builder 
addAttribute(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { if (attributeBuilder_ == null) { if (value == null) { @@ -11770,6 +15481,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .NameBytesPair attribute = 2; + */ public Builder addAttribute( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { if (attributeBuilder_ == null) { @@ -11784,6 +15498,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .NameBytesPair attribute = 2; + */ public Builder addAttribute( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { if (attributeBuilder_ == null) { @@ -11795,6 +15512,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .NameBytesPair attribute = 2; + */ public Builder addAttribute( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { if (attributeBuilder_ == null) { @@ -11806,6 +15526,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .NameBytesPair attribute = 2; + */ public Builder addAllAttribute( java.lang.Iterable values) { if (attributeBuilder_ == null) { @@ -11817,6 +15540,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .NameBytesPair attribute = 2; + */ public Builder clearAttribute() { if (attributeBuilder_ == null) { attribute_ = java.util.Collections.emptyList(); @@ -11827,6 +15553,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .NameBytesPair attribute = 2; + */ public Builder removeAttribute(int index) { if (attributeBuilder_ == null) { ensureAttributeIsMutable(); @@ -11837,10 +15566,16 @@ public final class ClientProtos { } return this; } + /** + * repeated .NameBytesPair attribute = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getAttributeBuilder( int index) { return getAttributeFieldBuilder().getBuilder(index); } + /** + * repeated .NameBytesPair attribute = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( int index) { if (attributeBuilder_ == null) { @@ -11848,6 +15583,9 @@ public final class ClientProtos { return attributeBuilder_.getMessageOrBuilder(index); } } + /** + * repeated .NameBytesPair attribute = 2; + */ public java.util.List getAttributeOrBuilderList() { if (attributeBuilder_ != null) { @@ -11856,15 +15594,24 @@ public final class ClientProtos { return java.util.Collections.unmodifiableList(attribute_); } } + /** + * repeated .NameBytesPair attribute = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder() { return getAttributeFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); } + /** + * repeated .NameBytesPair attribute = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder( int index) { return getAttributeFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); } + /** + * repeated .NameBytesPair attribute = 2; + */ public java.util.List getAttributeBuilderList() { return getAttributeFieldBuilder().getBuilderList(); @@ -11883,15 +15630,24 @@ public final class ClientProtos { } return attributeBuilder_; } - + // optional bytes startRow = 3; private com.google.protobuf.ByteString startRow_ = 
com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes startRow = 3; + */ public boolean hasStartRow() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional bytes startRow = 3; + */ public com.google.protobuf.ByteString getStartRow() { return startRow_; } + /** + * optional bytes startRow = 3; + */ public Builder setStartRow(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -11901,21 +15657,33 @@ public final class ClientProtos { onChanged(); return this; } + /** + * optional bytes startRow = 3; + */ public Builder clearStartRow() { bitField0_ = (bitField0_ & ~0x00000004); startRow_ = getDefaultInstance().getStartRow(); onChanged(); return this; } - + // optional bytes stopRow = 4; private com.google.protobuf.ByteString stopRow_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes stopRow = 4; + */ public boolean hasStopRow() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * optional bytes stopRow = 4; + */ public com.google.protobuf.ByteString getStopRow() { return stopRow_; } + /** + * optional bytes stopRow = 4; + */ public Builder setStopRow(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -11925,20 +15693,29 @@ public final class ClientProtos { onChanged(); return this; } + /** + * optional bytes stopRow = 4; + */ public Builder clearStopRow() { bitField0_ = (bitField0_ & ~0x00000008); stopRow_ = getDefaultInstance().getStopRow(); onChanged(); return this; } - + // optional .Filter filter = 5; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder> filterBuilder_; + /** + * optional .Filter filter = 5; + */ public boolean hasFilter() { return ((bitField0_ & 0x00000010) == 0x00000010); } + /** + * optional .Filter filter = 5; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilter() { if (filterBuilder_ == null) { return filter_; @@ -11946,6 +15723,9 @@ public final class ClientProtos { return filterBuilder_.getMessage(); } } + /** + * optional .Filter filter = 5; + */ public Builder setFilter(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter value) { if (filterBuilder_ == null) { if (value == null) { @@ -11959,6 +15739,9 @@ public final class ClientProtos { bitField0_ |= 0x00000010; return this; } + /** + * optional .Filter filter = 5; + */ public Builder setFilter( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder builderForValue) { if (filterBuilder_ == null) { @@ -11970,6 +15753,9 @@ public final class ClientProtos { bitField0_ |= 0x00000010; return this; } + /** + * optional .Filter filter = 5; + */ public Builder mergeFilter(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter value) { if (filterBuilder_ == null) { if (((bitField0_ & 0x00000010) == 0x00000010) && @@ -11986,6 +15772,9 @@ public final class ClientProtos { bitField0_ |= 0x00000010; return this; } + /** + * optional .Filter filter = 5; + */ public Builder clearFilter() { if (filterBuilder_ == null) { filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); @@ -11996,11 +15785,17 @@ public final class 
ClientProtos { bitField0_ = (bitField0_ & ~0x00000010); return this; } + /** + * optional .Filter filter = 5; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder getFilterBuilder() { bitField0_ |= 0x00000010; onChanged(); return getFilterFieldBuilder().getBuilder(); } + /** + * optional .Filter filter = 5; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFilterOrBuilder() { if (filterBuilder_ != null) { return filterBuilder_.getMessageOrBuilder(); @@ -12008,6 +15803,9 @@ public final class ClientProtos { return filter_; } } + /** + * optional .Filter filter = 5; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder> getFilterFieldBuilder() { @@ -12021,14 +15819,20 @@ public final class ClientProtos { } return filterBuilder_; } - + // optional .TimeRange timeRange = 6; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> timeRangeBuilder_; + /** + * optional .TimeRange timeRange = 6; + */ public boolean hasTimeRange() { return ((bitField0_ & 0x00000020) == 0x00000020); } + /** + * optional .TimeRange timeRange = 6; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { if (timeRangeBuilder_ == null) { return timeRange_; @@ -12036,6 +15840,9 @@ public final class ClientProtos { return timeRangeBuilder_.getMessage(); } } + /** + * optional .TimeRange timeRange = 6; + */ public Builder setTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) { if (timeRangeBuilder_ == null) { if (value == null) { @@ -12049,6 +15856,9 @@ public final class ClientProtos { bitField0_ |= 0x00000020; return this; } + /** + * optional .TimeRange timeRange = 6; + */ public Builder setTimeRange( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder builderForValue) { if (timeRangeBuilder_ == null) { @@ -12060,6 +15870,9 @@ public final class ClientProtos { bitField0_ |= 0x00000020; return this; } + /** + * optional .TimeRange timeRange = 6; + */ public Builder mergeTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) { if (timeRangeBuilder_ == null) { if (((bitField0_ & 0x00000020) == 0x00000020) && @@ -12076,6 +15889,9 @@ public final class ClientProtos { bitField0_ |= 0x00000020; return this; } + /** + * optional .TimeRange timeRange = 6; + */ public Builder clearTimeRange() { if (timeRangeBuilder_ == null) { timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); @@ -12086,11 +15902,17 @@ public final class ClientProtos { bitField0_ = (bitField0_ & ~0x00000020); return this; } + /** + * optional .TimeRange timeRange = 6; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder getTimeRangeBuilder() { bitField0_ |= 0x00000020; onChanged(); return getTimeRangeFieldBuilder().getBuilder(); } + /** + * optional .TimeRange timeRange = 6; + */ public 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { if (timeRangeBuilder_ != null) { return timeRangeBuilder_.getMessageOrBuilder(); @@ -12098,6 +15920,9 @@ public final class ClientProtos { return timeRange_; } } + /** + * optional .TimeRange timeRange = 6; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> getTimeRangeFieldBuilder() { @@ -12111,289 +15936,594 @@ public final class ClientProtos { } return timeRangeBuilder_; } - + // optional uint32 maxVersions = 7 [default = 1]; private int maxVersions_ = 1; + /** + * optional uint32 maxVersions = 7 [default = 1]; + */ public boolean hasMaxVersions() { return ((bitField0_ & 0x00000040) == 0x00000040); } + /** + * optional uint32 maxVersions = 7 [default = 1]; + */ public int getMaxVersions() { return maxVersions_; } + /** + * optional uint32 maxVersions = 7 [default = 1]; + */ public Builder setMaxVersions(int value) { bitField0_ |= 0x00000040; maxVersions_ = value; onChanged(); return this; } + /** + * optional uint32 maxVersions = 7 [default = 1]; + */ public Builder clearMaxVersions() { bitField0_ = (bitField0_ & ~0x00000040); maxVersions_ = 1; onChanged(); return this; } - + // optional bool cacheBlocks = 8 [default = true]; private boolean cacheBlocks_ = true; + /** + * optional bool cacheBlocks = 8 [default = true]; + */ public boolean hasCacheBlocks() { return ((bitField0_ & 0x00000080) == 0x00000080); } + /** + * optional bool cacheBlocks = 8 [default = true]; + */ public boolean getCacheBlocks() { return cacheBlocks_; } + /** + * optional bool cacheBlocks = 8 [default = true]; + */ public Builder setCacheBlocks(boolean value) { bitField0_ |= 0x00000080; cacheBlocks_ = value; onChanged(); return this; } + /** + * optional bool cacheBlocks = 8 [default = true]; + */ public Builder clearCacheBlocks() { bitField0_ = (bitField0_ & ~0x00000080); cacheBlocks_ = true; onChanged(); return this; } - + // optional uint32 batchSize = 9; private int batchSize_ ; + /** + * optional uint32 batchSize = 9; + */ public boolean hasBatchSize() { return ((bitField0_ & 0x00000100) == 0x00000100); } + /** + * optional uint32 batchSize = 9; + */ public int getBatchSize() { return batchSize_; } + /** + * optional uint32 batchSize = 9; + */ public Builder setBatchSize(int value) { bitField0_ |= 0x00000100; batchSize_ = value; onChanged(); return this; } + /** + * optional uint32 batchSize = 9; + */ public Builder clearBatchSize() { bitField0_ = (bitField0_ & ~0x00000100); batchSize_ = 0; onChanged(); return this; } - + // optional uint64 maxResultSize = 10; private long maxResultSize_ ; + /** + * optional uint64 maxResultSize = 10; + */ public boolean hasMaxResultSize() { return ((bitField0_ & 0x00000200) == 0x00000200); } + /** + * optional uint64 maxResultSize = 10; + */ public long getMaxResultSize() { return maxResultSize_; } + /** + * optional uint64 maxResultSize = 10; + */ public Builder setMaxResultSize(long value) { bitField0_ |= 0x00000200; maxResultSize_ = value; onChanged(); return this; } + /** + * optional uint64 maxResultSize = 10; + */ public Builder clearMaxResultSize() { bitField0_ = (bitField0_ & ~0x00000200); maxResultSize_ = 0L; onChanged(); return this; } - + // optional uint32 storeLimit = 11; private int storeLimit_ ; + /** + * optional uint32 storeLimit = 11; + */ public 
boolean hasStoreLimit() { return ((bitField0_ & 0x00000400) == 0x00000400); } + /** + * optional uint32 storeLimit = 11; + */ public int getStoreLimit() { return storeLimit_; } + /** + * optional uint32 storeLimit = 11; + */ public Builder setStoreLimit(int value) { bitField0_ |= 0x00000400; storeLimit_ = value; onChanged(); return this; } + /** + * optional uint32 storeLimit = 11; + */ public Builder clearStoreLimit() { bitField0_ = (bitField0_ & ~0x00000400); storeLimit_ = 0; onChanged(); return this; } - + // optional uint32 storeOffset = 12; private int storeOffset_ ; + /** + * optional uint32 storeOffset = 12; + */ public boolean hasStoreOffset() { return ((bitField0_ & 0x00000800) == 0x00000800); } + /** + * optional uint32 storeOffset = 12; + */ public int getStoreOffset() { return storeOffset_; } + /** + * optional uint32 storeOffset = 12; + */ public Builder setStoreOffset(int value) { bitField0_ |= 0x00000800; storeOffset_ = value; onChanged(); return this; } + /** + * optional uint32 storeOffset = 12; + */ public Builder clearStoreOffset() { bitField0_ = (bitField0_ & ~0x00000800); storeOffset_ = 0; onChanged(); return this; } - + // optional bool loadColumnFamiliesOnDemand = 13; private boolean loadColumnFamiliesOnDemand_ ; + /** + * optional bool loadColumnFamiliesOnDemand = 13; + * + *
+       * DO NOT add defaults to loadColumnFamiliesOnDemand. 
+       * 
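+       *
+       * A plausible reading (not generated text): leaving the field without
+       * a declared default lets an unset value keep its own meaning, such as
+       * "fall back to a server-side default". A minimal sketch, assuming a
+       * hypothetical regionDefault fallback:
+       *
+       *   boolean onDemand = scan.hasLoadColumnFamiliesOnDemand()
+       *       ? scan.getLoadColumnFamiliesOnDemand()
+       *       : regionDefault; // hypothetical server-side default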
+ */ public boolean hasLoadColumnFamiliesOnDemand() { return ((bitField0_ & 0x00001000) == 0x00001000); } + /** + * optional bool loadColumnFamiliesOnDemand = 13; + * + *
+       * DO NOT add defaults to loadColumnFamiliesOnDemand. 
+       * 
+ */ public boolean getLoadColumnFamiliesOnDemand() { return loadColumnFamiliesOnDemand_; } + /** + * optional bool loadColumnFamiliesOnDemand = 13; + * + *
+       * DO NOT add defaults to loadColumnFamiliesOnDemand. 
+       * 
+ */ public Builder setLoadColumnFamiliesOnDemand(boolean value) { bitField0_ |= 0x00001000; loadColumnFamiliesOnDemand_ = value; onChanged(); return this; } + /** + * optional bool loadColumnFamiliesOnDemand = 13; + * + *
+       * DO NOT add defaults to loadColumnFamiliesOnDemand. 
+       * 
+ */ public Builder clearLoadColumnFamiliesOnDemand() { bitField0_ = (bitField0_ & ~0x00001000); loadColumnFamiliesOnDemand_ = false; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:Scan) } - + static { defaultInstance = new Scan(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:Scan) } - + public interface ScanRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // optional .RegionSpecifier region = 1; + /** + * optional .RegionSpecifier region = 1; + */ boolean hasRegion(); + /** + * optional .RegionSpecifier region = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + /** + * optional .RegionSpecifier region = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - + // optional .Scan scan = 2; + /** + * optional .Scan scan = 2; + */ boolean hasScan(); + /** + * optional .Scan scan = 2; + */ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan(); + /** + * optional .Scan scan = 2; + */ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder(); - + // optional uint64 scannerId = 3; + /** + * optional uint64 scannerId = 3; + */ boolean hasScannerId(); + /** + * optional uint64 scannerId = 3; + */ long getScannerId(); - + // optional uint32 numberOfRows = 4; + /** + * optional uint32 numberOfRows = 4; + */ boolean hasNumberOfRows(); + /** + * optional uint32 numberOfRows = 4; + */ int getNumberOfRows(); - + // optional bool closeScanner = 5; + /** + * optional bool closeScanner = 5; + */ boolean hasCloseScanner(); + /** + * optional bool closeScanner = 5; + */ boolean getCloseScanner(); - + // optional uint64 nextCallSeq = 6; + /** + * optional uint64 nextCallSeq = 6; + */ boolean hasNextCallSeq(); + /** + * optional uint64 nextCallSeq = 6; + */ long getNextCallSeq(); } + /** + * Protobuf type {@code ScanRequest} + * + *
+   **
+   * A scan request. Initially, it should specify a scan. Later on, you can
+   * use the returned scanner id to fetch result batches with follow-up
+   * scan requests.
+   *
+   * The scanner will remain open if there are more results and it has not
+   * been explicitly asked to close.
+   *
+   * You can fetch results and ask for the scanner to be closed in the same
+   * request, saving a trip when you are not interested in the remaining results.
+   * 
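+   *
+   * A minimal client-side sketch of that sequence. The RPC stub, controller,
+   * and the region/scan values are assumptions, not part of this message:
+   *
+   *   ScanRequest open = ScanRequest.newBuilder()
+   *       .setRegion(region).setScan(scan).setNumberOfRows(100).build();
+   *   ScanResponse r1 = stub.scan(controller, open);     // opens a scanner
+   *   ScanRequest more = ScanRequest.newBuilder()        // fetch next batch
+   *       .setScannerId(r1.getScannerId()).setNumberOfRows(100).build();
+   *   ScanRequest close = ScanRequest.newBuilder()       // release the scanner
+   *       .setScannerId(r1.getScannerId()).setCloseScanner(true).build();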
+ */ public static final class ScanRequest extends com.google.protobuf.GeneratedMessage implements ScanRequestOrBuilder { // Use ScanRequest.newBuilder() to construct. - private ScanRequest(Builder builder) { + private ScanRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private ScanRequest(boolean noInit) {} - + private ScanRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final ScanRequest defaultInstance; public static ScanRequest getDefaultInstance() { return defaultInstance; } - + public ScanRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ScanRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = region_.toBuilder(); + } + region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(region_); + region_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder subBuilder = null; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + subBuilder = scan_.toBuilder(); + } + scan_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(scan_); + scan_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000002; + break; + } + case 24: { + bitField0_ |= 0x00000004; + scannerId_ = input.readUInt64(); + break; + } + case 32: { + bitField0_ |= 0x00000008; + numberOfRows_ = input.readUInt32(); + break; + } + case 40: { + bitField0_ |= 0x00000010; + closeScanner_ = input.readBool(); + break; + } + case 48: { + bitField0_ |= 0x00000020; + nextCallSeq_ = input.readUInt64(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public ScanRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ScanRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // optional .RegionSpecifier region = 1; public static final int REGION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + /** + * optional .RegionSpecifier region = 1; + */ public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { return region_; } + /** + * optional .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { return region_; } - + // optional .Scan scan = 2; public static final int SCAN_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_; + /** + * optional .Scan scan = 2; + */ public boolean hasScan() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional .Scan scan = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() { return scan_; } + /** + * optional .Scan scan = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() { return scan_; } - + // optional uint64 scannerId = 3; public static final int SCANNERID_FIELD_NUMBER = 3; private long scannerId_; + /** + * optional uint64 scannerId = 3; + */ public boolean hasScannerId() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional uint64 scannerId = 3; + */ public long getScannerId() { return scannerId_; } - + // optional uint32 numberOfRows = 4; public static final int NUMBEROFROWS_FIELD_NUMBER = 4; private int numberOfRows_; + /** + * optional uint32 numberOfRows = 4; + */ public boolean hasNumberOfRows() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * optional uint32 numberOfRows = 4; + */ public int getNumberOfRows() { return numberOfRows_; } - + // optional bool closeScanner = 5; public static final int CLOSESCANNER_FIELD_NUMBER = 5; private boolean closeScanner_; + /** + * optional bool closeScanner = 5; + */ public boolean hasCloseScanner() { return ((bitField0_ & 0x00000010) == 0x00000010); } + /** + * optional bool closeScanner = 5; + */ public boolean getCloseScanner() { return closeScanner_; } - + // optional uint64 nextCallSeq = 6; public static final int NEXTCALLSEQ_FIELD_NUMBER = 6; private long nextCallSeq_; + /** + * optional uint64 nextCallSeq = 6; + */ public boolean hasNextCallSeq() { return ((bitField0_ & 0x00000020) == 0x00000020); } + /** + * optional uint64 nextCallSeq = 6; + */ public long getNextCallSeq() { return 
nextCallSeq_; } - + private void initFields() { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); @@ -12406,7 +16536,7 @@ public final class ClientProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (hasRegion()) { if (!getRegion().isInitialized()) { memoizedIsInitialized = 0; @@ -12422,7 +16552,7 @@ public final class ClientProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -12446,12 +16576,12 @@ public final class ClientProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -12481,14 +16611,14 @@ public final class ClientProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -12498,7 +16628,7 @@ public final class ClientProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest) obj; - + boolean result = true; result = result && (hasRegion() == other.hasRegion()); if (hasRegion()) { @@ -12534,9 +16664,13 @@ public final class ClientProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasRegion()) { @@ -12564,89 +16698,92 @@ public final class ClientProtos { hash = (53 * hash) + hashLong(getNextCallSeq()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return 
newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code ScanRequest} + * + *
+     **
+     * A scan request. Initially, it should specify a scan. Later on, you can
+     * use the returned scanner id to fetch result batches with follow-up
+     * scan requests.
+     *
+     * The scanner will remain open if there are more results and it has not
+     * been explicitly asked to close.
+     *
+     * You can fetch results and ask for the scanner to be closed in the same
+     * request, saving a trip when you are not interested in the remaining results.
+     * 
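+     *
+     * Builder sketch for a follow-up fetch. Here seq is a hypothetical
+     * client-side counter, assuming nextCallSeq is advanced once per fetch
+     * so the server can recognize a retried call:
+     *
+     *   ScanRequest fetch = ScanRequest.newBuilder()
+     *       .setScannerId(scannerId)   // id returned by the open request
+     *       .setNumberOfRows(100)
+     *       .setNextCallSeq(seq++)     // hypothetical per-fetch counter
+     *       .build();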
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequestOrBuilder { @@ -12654,18 +16791,21 @@ public final class ClientProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -12678,7 +16818,7 @@ public final class ClientProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (regionBuilder_ == null) { @@ -12703,20 +16843,20 @@ public final class ClientProtos { bitField0_ = (bitField0_ & ~0x00000020); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest build() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest result = buildPartial(); if (!result.isInitialized()) { @@ -12724,17 +16864,7 @@ public final class ClientProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest(this); int from_bitField0_ = bitField0_; @@ -12775,7 +16905,7 @@ public final class ClientProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest)other); @@ -12784,7 +16914,7 @@ public final class ClientProtos { return this; } } - + public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.getDefaultInstance()) return this; if (other.hasRegion()) { @@ -12808,7 +16938,7 @@ public final class ClientProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (hasRegion()) { if (!getRegion().isInitialized()) { @@ -12824,81 +16954,39 @@ public final class ClientProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegion(subBuilder.buildPartial()); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.newBuilder(); - if (hasScan()) { - subBuilder.mergeFrom(getScan()); - } - input.readMessage(subBuilder, extensionRegistry); - setScan(subBuilder.buildPartial()); - break; - } - case 24: { - bitField0_ |= 0x00000004; - scannerId_ = input.readUInt64(); - break; - } - case 32: { - bitField0_ |= 0x00000008; - numberOfRows_ = input.readUInt32(); - break; - } - case 40: { - bitField0_ |= 0x00000010; - closeScanner_ = input.readBool(); - break; - } - case 48: { - bitField0_ |= 0x00000020; - nextCallSeq_ = input.readUInt64(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // optional .RegionSpecifier region = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + /** + * optional .RegionSpecifier region = 1; + */ public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { return region_; @@ 
-12906,6 +16994,9 @@ public final class ClientProtos { return regionBuilder_.getMessage(); } } + /** + * optional .RegionSpecifier region = 1; + */ public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (value == null) { @@ -12919,6 +17010,9 @@ public final class ClientProtos { bitField0_ |= 0x00000001; return this; } + /** + * optional .RegionSpecifier region = 1; + */ public Builder setRegion( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { if (regionBuilder_ == null) { @@ -12930,6 +17024,9 @@ public final class ClientProtos { bitField0_ |= 0x00000001; return this; } + /** + * optional .RegionSpecifier region = 1; + */ public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -12946,6 +17043,9 @@ public final class ClientProtos { bitField0_ |= 0x00000001; return this; } + /** + * optional .RegionSpecifier region = 1; + */ public Builder clearRegion() { if (regionBuilder_ == null) { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); @@ -12956,11 +17056,17 @@ public final class ClientProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * optional .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { bitField0_ |= 0x00000001; onChanged(); return getRegionFieldBuilder().getBuilder(); } + /** + * optional .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); @@ -12968,6 +17074,9 @@ public final class ClientProtos { return region_; } } + /** + * optional .RegionSpecifier region = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { @@ -12981,14 +17090,20 @@ public final class ClientProtos { } return regionBuilder_; } - + // optional .Scan scan = 2; private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> scanBuilder_; + /** + * optional .Scan scan = 2; + */ public boolean hasScan() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional .Scan scan = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() { if (scanBuilder_ == null) { return scan_; @@ -12996,6 +17111,9 @@ public final class ClientProtos { return scanBuilder_.getMessage(); } } + /** + * optional .Scan scan = 2; + */ public Builder setScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) { if (scanBuilder_ == null) { if (value == null) { @@ -13009,6 +17127,9 @@ public final class ClientProtos { bitField0_ |= 0x00000002; return this; } + /** + * optional .Scan scan = 2; + */ public 
Builder setScan( org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder builderForValue) { if (scanBuilder_ == null) { @@ -13020,6 +17141,9 @@ public final class ClientProtos { bitField0_ |= 0x00000002; return this; } + /** + * optional .Scan scan = 2; + */ public Builder mergeScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) { if (scanBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && @@ -13036,6 +17160,9 @@ public final class ClientProtos { bitField0_ |= 0x00000002; return this; } + /** + * optional .Scan scan = 2; + */ public Builder clearScan() { if (scanBuilder_ == null) { scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); @@ -13046,11 +17173,17 @@ public final class ClientProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } + /** + * optional .Scan scan = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder getScanBuilder() { bitField0_ |= 0x00000002; onChanged(); return getScanFieldBuilder().getBuilder(); } + /** + * optional .Scan scan = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() { if (scanBuilder_ != null) { return scanBuilder_.getMessageOrBuilder(); @@ -13058,6 +17191,9 @@ public final class ClientProtos { return scan_; } } + /** + * optional .Scan scan = 2; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> getScanFieldBuilder() { @@ -13071,221 +17207,446 @@ public final class ClientProtos { } return scanBuilder_; } - + // optional uint64 scannerId = 3; private long scannerId_ ; + /** + * optional uint64 scannerId = 3; + */ public boolean hasScannerId() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional uint64 scannerId = 3; + */ public long getScannerId() { return scannerId_; } + /** + * optional uint64 scannerId = 3; + */ public Builder setScannerId(long value) { bitField0_ |= 0x00000004; scannerId_ = value; onChanged(); return this; } + /** + * optional uint64 scannerId = 3; + */ public Builder clearScannerId() { bitField0_ = (bitField0_ & ~0x00000004); scannerId_ = 0L; onChanged(); return this; } - + // optional uint32 numberOfRows = 4; private int numberOfRows_ ; + /** + * optional uint32 numberOfRows = 4; + */ public boolean hasNumberOfRows() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * optional uint32 numberOfRows = 4; + */ public int getNumberOfRows() { return numberOfRows_; } + /** + * optional uint32 numberOfRows = 4; + */ public Builder setNumberOfRows(int value) { bitField0_ |= 0x00000008; numberOfRows_ = value; onChanged(); return this; } + /** + * optional uint32 numberOfRows = 4; + */ public Builder clearNumberOfRows() { bitField0_ = (bitField0_ & ~0x00000008); numberOfRows_ = 0; onChanged(); return this; } - + // optional bool closeScanner = 5; private boolean closeScanner_ ; + /** + * optional bool closeScanner = 5; + */ public boolean hasCloseScanner() { return ((bitField0_ & 0x00000010) == 0x00000010); } + /** + * optional bool closeScanner = 5; + */ public boolean getCloseScanner() { return closeScanner_; } + /** + * optional bool closeScanner = 5; + */ public Builder setCloseScanner(boolean value) { bitField0_ |= 0x00000010; closeScanner_ = value; onChanged(); return this; } + /** + * optional bool closeScanner = 5; + */ 
public Builder clearCloseScanner() { bitField0_ = (bitField0_ & ~0x00000010); closeScanner_ = false; onChanged(); return this; } - + // optional uint64 nextCallSeq = 6; private long nextCallSeq_ ; + /** + * optional uint64 nextCallSeq = 6; + */ public boolean hasNextCallSeq() { return ((bitField0_ & 0x00000020) == 0x00000020); } + /** + * optional uint64 nextCallSeq = 6; + */ public long getNextCallSeq() { return nextCallSeq_; } + /** + * optional uint64 nextCallSeq = 6; + */ public Builder setNextCallSeq(long value) { bitField0_ |= 0x00000020; nextCallSeq_ = value; onChanged(); return this; } + /** + * optional uint64 nextCallSeq = 6; + */ public Builder clearNextCallSeq() { bitField0_ = (bitField0_ & ~0x00000020); nextCallSeq_ = 0L; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:ScanRequest) } - + static { defaultInstance = new ScanRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:ScanRequest) } - + public interface ScanResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // repeated .Result result = 1; + /** + * repeated .Result result = 1; + */ java.util.List getResultList(); + /** + * repeated .Result result = 1; + */ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult(int index); + /** + * repeated .Result result = 1; + */ int getResultCount(); + /** + * repeated .Result result = 1; + */ java.util.List getResultOrBuilderList(); + /** + * repeated .Result result = 1; + */ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder( int index); - + // optional uint64 scannerId = 2; + /** + * optional uint64 scannerId = 2; + */ boolean hasScannerId(); + /** + * optional uint64 scannerId = 2; + */ long getScannerId(); - + // optional bool moreResults = 3; + /** + * optional bool moreResults = 3; + */ boolean hasMoreResults(); + /** + * optional bool moreResults = 3; + */ boolean getMoreResults(); - + // optional uint32 ttl = 4; + /** + * optional uint32 ttl = 4; + */ boolean hasTtl(); + /** + * optional uint32 ttl = 4; + */ int getTtl(); - + // optional uint64 resultSizeBytes = 5; + /** + * optional uint64 resultSizeBytes = 5; + */ boolean hasResultSizeBytes(); + /** + * optional uint64 resultSizeBytes = 5; + */ long getResultSizeBytes(); } + /** + * Protobuf type {@code ScanResponse} + * + *
+   **
+   * The scan response. If there are no more results, moreResults will
+   * be false. If moreResults is not specified, more results are available.
+   * 
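+   *
+   * A minimal reading sketch under those semantics (response is an assumed
+   * local variable):
+   *
+   *   // an unset moreResults means more results are available
+   *   boolean more = !response.hasMoreResults() || response.getMoreResults();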
+ */ public static final class ScanResponse extends com.google.protobuf.GeneratedMessage implements ScanResponseOrBuilder { // Use ScanResponse.newBuilder() to construct. - private ScanResponse(Builder builder) { + private ScanResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private ScanResponse(boolean noInit) {} - + private ScanResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final ScanResponse defaultInstance; public static ScanResponse getDefaultInstance() { return defaultInstance; } - + public ScanResponse getDefaultInstanceForType() { return defaultInstance; } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanResponse_fieldAccessorTable; + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; } - + private ScanResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + result_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + result_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.PARSER, extensionRegistry)); + break; + } + case 16: { + bitField0_ |= 0x00000001; + scannerId_ = input.readUInt64(); + break; + } + case 24: { + bitField0_ |= 0x00000002; + moreResults_ = input.readBool(); + break; + } + case 32: { + bitField0_ |= 0x00000004; + ttl_ = input.readUInt32(); + break; + } + case 40: { + bitField0_ |= 0x00000008; + resultSizeBytes_ = input.readUInt64(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + result_ = java.util.Collections.unmodifiableList(result_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public ScanResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ScanResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + private int bitField0_; // repeated .Result result = 1; public static final int RESULT_FIELD_NUMBER = 1; private java.util.List result_; + /** + * repeated .Result result = 1; + */ public java.util.List getResultList() { return result_; } + /** + * repeated .Result result = 1; + */ public java.util.List getResultOrBuilderList() { return result_; } + /** + * repeated .Result result = 1; + */ public int getResultCount() { return result_.size(); } + /** + * repeated .Result result = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult(int index) { return result_.get(index); } + /** + * repeated .Result result = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder( int index) { return result_.get(index); } - + // optional uint64 scannerId = 2; public static final int SCANNERID_FIELD_NUMBER = 2; private long scannerId_; + /** + * optional uint64 scannerId = 2; + */ public boolean hasScannerId() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional uint64 scannerId = 2; + */ public long getScannerId() { return scannerId_; } - + // optional bool moreResults = 3; public static final int MORERESULTS_FIELD_NUMBER = 3; private boolean moreResults_; + /** + * optional bool moreResults = 3; + */ public boolean hasMoreResults() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional bool moreResults = 3; + */ public boolean getMoreResults() { return moreResults_; } - + // optional uint32 ttl = 4; public static final int TTL_FIELD_NUMBER = 4; private int ttl_; + /** + * optional uint32 ttl = 4; + */ public boolean hasTtl() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional uint32 ttl = 4; + */ public int getTtl() { return ttl_; } - + // optional uint64 resultSizeBytes = 5; public static final int RESULTSIZEBYTES_FIELD_NUMBER = 5; private long resultSizeBytes_; + /** + * optional uint64 resultSizeBytes = 5; + */ public boolean hasResultSizeBytes() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * optional uint64 resultSizeBytes = 5; + */ public long getResultSizeBytes() { return resultSizeBytes_; } - + private void initFields() { result_ = java.util.Collections.emptyList(); scannerId_ = 0L; @@ -13297,11 +17658,11 @@ public final class ClientProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -13322,12 +17683,12 @@ public final class ClientProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; for (int i = 0; i < 
result_.size(); i++) { size += com.google.protobuf.CodedOutputStream @@ -13353,14 +17714,14 @@ public final class ClientProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -13370,7 +17731,7 @@ public final class ClientProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse) obj; - + boolean result = true; result = result && getResultList() .equals(other.getResultList()); @@ -13398,9 +17759,13 @@ public final class ClientProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (getResultCount() > 0) { @@ -13424,89 +17789,85 @@ public final class ClientProtos { hash = (53 * hash) + hashLong(getResultSizeBytes()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return 
PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code ScanResponse} + * + *
+     * <pre>
+     **
+     * The scan response. If there are no more results, moreResults will
+     * be false.  If it is not specified, it means there are more.
+     * </pre>
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponseOrBuilder { @@ -13514,18 +17875,21 @@ public final class ClientProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -13537,7 +17901,7 @@ public final class ClientProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (resultBuilder_ == null) { @@ -13556,20 +17920,20 @@ public final class ClientProtos { bitField0_ = (bitField0_ & ~0x00000010); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse build() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse result = buildPartial(); if (!result.isInitialized()) { @@ -13577,17 +17941,7 @@ public final class ClientProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse(this); int from_bitField0_ = bitField0_; @@ -13621,7 +17975,7 @@ public final class ClientProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse)other); @@ -13630,7 +17984,7 @@ public final class ClientProtos { return this; } } - + public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance()) return this; if (resultBuilder_ == null) { @@ -13674,66 +18028,30 @@ public final class ClientProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addResult(subBuilder.buildPartial()); - break; - } - case 16: { - bitField0_ |= 0x00000002; - scannerId_ = input.readUInt64(); - break; - } - case 24: { - bitField0_ |= 0x00000004; - moreResults_ = input.readBool(); - break; - } - case 32: { - bitField0_ |= 0x00000008; - ttl_ = input.readUInt32(); - break; - } - case 40: { - bitField0_ |= 0x00000010; - resultSizeBytes_ = input.readUInt64(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // repeated .Result result = 1; private java.util.List result_ = java.util.Collections.emptyList(); @@ -13743,10 +18061,13 @@ public final class ClientProtos { bitField0_ |= 0x00000001; } } - + private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> resultBuilder_; - + + /** + * repeated .Result result = 1; + */ public java.util.List getResultList() { if (resultBuilder_ == null) { return java.util.Collections.unmodifiableList(result_); @@ -13754,6 +18075,9 @@ public final class ClientProtos { return resultBuilder_.getMessageList(); } } + /** + * repeated .Result result = 1; + */ public int getResultCount() { if (resultBuilder_ == null) { return result_.size(); @@ -13761,6 +18085,9 @@ public final class ClientProtos { return resultBuilder_.getCount(); } } + /** + * repeated .Result result = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult(int index) { if (resultBuilder_ == null) { return result_.get(index); @@ -13768,6 +18095,9 @@ public final class ClientProtos { return resultBuilder_.getMessage(index); } } + /** + * repeated .Result result = 1; + */ public Builder setResult( int index, 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) { if (resultBuilder_ == null) { @@ -13782,6 +18112,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .Result result = 1; + */ public Builder setResult( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) { if (resultBuilder_ == null) { @@ -13793,6 +18126,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .Result result = 1; + */ public Builder addResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) { if (resultBuilder_ == null) { if (value == null) { @@ -13806,6 +18142,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .Result result = 1; + */ public Builder addResult( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) { if (resultBuilder_ == null) { @@ -13820,6 +18159,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .Result result = 1; + */ public Builder addResult( org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) { if (resultBuilder_ == null) { @@ -13831,6 +18173,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .Result result = 1; + */ public Builder addResult( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) { if (resultBuilder_ == null) { @@ -13842,6 +18187,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .Result result = 1; + */ public Builder addAllResult( java.lang.Iterable values) { if (resultBuilder_ == null) { @@ -13853,6 +18201,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .Result result = 1; + */ public Builder clearResult() { if (resultBuilder_ == null) { result_ = java.util.Collections.emptyList(); @@ -13863,6 +18214,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .Result result = 1; + */ public Builder removeResult(int index) { if (resultBuilder_ == null) { ensureResultIsMutable(); @@ -13873,10 +18227,16 @@ public final class ClientProtos { } return this; } + /** + * repeated .Result result = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder getResultBuilder( int index) { return getResultFieldBuilder().getBuilder(index); } + /** + * repeated .Result result = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder( int index) { if (resultBuilder_ == null) { @@ -13884,6 +18244,9 @@ public final class ClientProtos { return resultBuilder_.getMessageOrBuilder(index); } } + /** + * repeated .Result result = 1; + */ public java.util.List getResultOrBuilderList() { if (resultBuilder_ != null) { @@ -13892,15 +18255,24 @@ public final class ClientProtos { return java.util.Collections.unmodifiableList(result_); } } + /** + * repeated .Result result = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder addResultBuilder() { return getResultFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance()); } + /** + * repeated .Result result = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder addResultBuilder( int index) { return getResultFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance()); } + /** + * repeated .Result result = 1; + */ public java.util.List 
getResultBuilderList() { return getResultFieldBuilder().getBuilderList(); @@ -13919,234 +18291,515 @@ public final class ClientProtos { } return resultBuilder_; } - + // optional uint64 scannerId = 2; private long scannerId_ ; + /** + * optional uint64 scannerId = 2; + */ public boolean hasScannerId() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional uint64 scannerId = 2; + */ public long getScannerId() { return scannerId_; } + /** + * optional uint64 scannerId = 2; + */ public Builder setScannerId(long value) { bitField0_ |= 0x00000002; scannerId_ = value; onChanged(); return this; } + /** + * optional uint64 scannerId = 2; + */ public Builder clearScannerId() { bitField0_ = (bitField0_ & ~0x00000002); scannerId_ = 0L; onChanged(); return this; } - + // optional bool moreResults = 3; private boolean moreResults_ ; + /** + * optional bool moreResults = 3; + */ public boolean hasMoreResults() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional bool moreResults = 3; + */ public boolean getMoreResults() { return moreResults_; } + /** + * optional bool moreResults = 3; + */ public Builder setMoreResults(boolean value) { bitField0_ |= 0x00000004; moreResults_ = value; onChanged(); return this; } + /** + * optional bool moreResults = 3; + */ public Builder clearMoreResults() { bitField0_ = (bitField0_ & ~0x00000004); moreResults_ = false; onChanged(); return this; } - + // optional uint32 ttl = 4; private int ttl_ ; + /** + * optional uint32 ttl = 4; + */ public boolean hasTtl() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * optional uint32 ttl = 4; + */ public int getTtl() { return ttl_; } + /** + * optional uint32 ttl = 4; + */ public Builder setTtl(int value) { bitField0_ |= 0x00000008; ttl_ = value; onChanged(); return this; } + /** + * optional uint32 ttl = 4; + */ public Builder clearTtl() { bitField0_ = (bitField0_ & ~0x00000008); ttl_ = 0; onChanged(); return this; } - + // optional uint64 resultSizeBytes = 5; private long resultSizeBytes_ ; + /** + * optional uint64 resultSizeBytes = 5; + */ public boolean hasResultSizeBytes() { return ((bitField0_ & 0x00000010) == 0x00000010); } + /** + * optional uint64 resultSizeBytes = 5; + */ public long getResultSizeBytes() { return resultSizeBytes_; } + /** + * optional uint64 resultSizeBytes = 5; + */ public Builder setResultSizeBytes(long value) { bitField0_ |= 0x00000010; resultSizeBytes_ = value; onChanged(); return this; } + /** + * optional uint64 resultSizeBytes = 5; + */ public Builder clearResultSizeBytes() { bitField0_ = (bitField0_ & ~0x00000010); resultSizeBytes_ = 0L; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:ScanResponse) } - + static { defaultInstance = new ScanResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:ScanResponse) } - + public interface BulkLoadHFileRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .RegionSpecifier region = 1; + /** + * required .RegionSpecifier region = 1; + */ boolean hasRegion(); + /** + * required .RegionSpecifier region = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + /** + * required .RegionSpecifier region = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - + // repeated .BulkLoadHFileRequest.FamilyPath familyPath = 2; + /** + * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 2; + */ java.util.List getFamilyPathList(); + 
/** + * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 2; + */ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index); + /** + * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 2; + */ int getFamilyPathCount(); + /** + * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 2; + */ java.util.List getFamilyPathOrBuilderList(); + /** + * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 2; + */ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder( int index); - + // optional bool assignSeqNum = 3; + /** + * optional bool assignSeqNum = 3; + */ boolean hasAssignSeqNum(); + /** + * optional bool assignSeqNum = 3; + */ boolean getAssignSeqNum(); } + /** + * Protobuf type {@code BulkLoadHFileRequest} + * + *
+   * <pre>
+   **
+   * Atomically bulk load multiple HFiles (say from different column families)
+   * into an open region.
+   * </pre>
+ */ public static final class BulkLoadHFileRequest extends com.google.protobuf.GeneratedMessage implements BulkLoadHFileRequestOrBuilder { // Use BulkLoadHFileRequest.newBuilder() to construct. - private BulkLoadHFileRequest(Builder builder) { + private BulkLoadHFileRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private BulkLoadHFileRequest(boolean noInit) {} - + private BulkLoadHFileRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final BulkLoadHFileRequest defaultInstance; public static BulkLoadHFileRequest getDefaultInstance() { return defaultInstance; } - + public BulkLoadHFileRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private BulkLoadHFileRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = region_.toBuilder(); + } + region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(region_); + region_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + case 18: { + if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + familyPath_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000002; + } + familyPath_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.PARSER, extensionRegistry)); + break; + } + case 24: { + bitField0_ |= 0x00000002; + assignSeqNum_ = input.readBool(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + familyPath_ = java.util.Collections.unmodifiableList(familyPath_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_fieldAccessorTable + 
.ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public BulkLoadHFileRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new BulkLoadHFileRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + public interface FamilyPathOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required bytes family = 1; + /** + * required bytes family = 1; + */ boolean hasFamily(); + /** + * required bytes family = 1; + */ com.google.protobuf.ByteString getFamily(); - + // required string path = 2; + /** + * required string path = 2; + */ boolean hasPath(); - String getPath(); - } + /** + * required string path = 2; + */ + java.lang.String getPath(); + /** + * required string path = 2; + */ + com.google.protobuf.ByteString + getPathBytes(); + } + /** + * Protobuf type {@code BulkLoadHFileRequest.FamilyPath} + */ public static final class FamilyPath extends com.google.protobuf.GeneratedMessage implements FamilyPathOrBuilder { // Use FamilyPath.newBuilder() to construct. - private FamilyPath(Builder builder) { + private FamilyPath(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private FamilyPath(boolean noInit) {} - + private FamilyPath(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final FamilyPath defaultInstance; public static FamilyPath getDefaultInstance() { return defaultInstance; } - + public FamilyPath getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private FamilyPath( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + family_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + path_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_FamilyPath_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public FamilyPath parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new FamilyPath(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required bytes family = 1; public static final int FAMILY_FIELD_NUMBER = 1; private com.google.protobuf.ByteString family_; + /** + * required bytes family = 1; + */ public boolean hasFamily() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes family = 1; + */ public com.google.protobuf.ByteString getFamily() { return family_; } - + // required string path = 2; public static final int PATH_FIELD_NUMBER = 2; private java.lang.Object path_; + /** + * required string path = 2; + */ public boolean hasPath() { return ((bitField0_ & 0x00000002) == 0x00000002); } - public String getPath() { + /** + * required string path = 2; + */ + public java.lang.String getPath() { java.lang.Object ref = path_; - if (ref instanceof String) { - return (String) ref; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { path_ = s; } return s; } } - private com.google.protobuf.ByteString getPathBytes() { + /** + * required string path = 2; + */ + public com.google.protobuf.ByteString + getPathBytes() { java.lang.Object ref = path_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); path_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + private void initFields() { family_ = com.google.protobuf.ByteString.EMPTY; path_ = ""; @@ -14155,7 +18808,7 @@ public final class ClientProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasFamily()) { memoizedIsInitialized = 0; return false; @@ -14167,7 +18820,7 @@ public final class ClientProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -14179,12 +18832,12 @@ public final class ClientProtos { } getUnknownFields().writeTo(output); } - + private int 
memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -14198,14 +18851,14 @@ public final class ClientProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -14215,7 +18868,7 @@ public final class ClientProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath) obj; - + boolean result = true; result = result && (hasFamily() == other.hasFamily()); if (hasFamily()) { @@ -14231,9 +18884,13 @@ public final class ClientProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasFamily()) { @@ -14245,89 +18902,79 @@ public final class ClientProtos { hash = (53 * hash) + getPath().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code BulkLoadHFileRequest.FamilyPath} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder { @@ -14335,18 +18982,21 @@ public final class ClientProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_FamilyPath_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -14357,7 +19007,7 @@ public final class ClientProtos { private static Builder create() { return new Builder(); 
} - + public Builder clear() { super.clear(); family_ = com.google.protobuf.ByteString.EMPTY; @@ -14366,20 +19016,20 @@ public final class ClientProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_FamilyPath_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath build() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath result = buildPartial(); if (!result.isInitialized()) { @@ -14387,17 +19037,7 @@ public final class ClientProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath(this); int from_bitField0_ = bitField0_; @@ -14414,7 +19054,7 @@ public final class ClientProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath)other); @@ -14423,19 +19063,21 @@ public final class ClientProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath other) { if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance()) return this; if (other.hasFamily()) { setFamily(other.getFamily()); } if (other.hasPath()) { - setPath(other.getPath()); + bitField0_ |= 0x00000002; + path_ = other.path_; + onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasFamily()) { @@ -14447,54 +19089,43 @@ public final class ClientProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if 
(!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - family_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - path_ = input.readBytes(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required bytes family = 1; private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes family = 1; + */ public boolean hasFamily() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes family = 1; + */ public com.google.protobuf.ByteString getFamily() { return family_; } + /** + * required bytes family = 1; + */ public Builder setFamily(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -14504,29 +19135,59 @@ public final class ClientProtos { onChanged(); return this; } + /** + * required bytes family = 1; + */ public Builder clearFamily() { bitField0_ = (bitField0_ & ~0x00000001); family_ = getDefaultInstance().getFamily(); onChanged(); return this; } - + // required string path = 2; private java.lang.Object path_ = ""; + /** + * required string path = 2; + */ public boolean hasPath() { return ((bitField0_ & 0x00000002) == 0x00000002); } - public String getPath() { + /** + * required string path = 2; + */ + public java.lang.String getPath() { java.lang.Object ref = path_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); path_ = s; return s; } else { - return (String) ref; + return (java.lang.String) ref; } } - public Builder setPath(String value) { + /** + * required string path = 2; + */ + public com.google.protobuf.ByteString + getPathBytes() { + java.lang.Object ref = path_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + path_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * required string path = 2; + */ + public Builder setPath( + java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ -14535,74 +19196,115 @@ public final class ClientProtos { onChanged(); return this; } + /** + * required string path = 2; + */ public Builder clearPath() { bitField0_ = (bitField0_ & ~0x00000002); path_ = getDefaultInstance().getPath(); onChanged(); return this; } - void setPath(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000002; + /** + * required string path = 2; + */ + public Builder setPathBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; path_ = value; onChanged(); + return this; } - + // @@protoc_insertion_point(builder_scope:BulkLoadHFileRequest.FamilyPath) } - + static { 
defaultInstance = new FamilyPath(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:BulkLoadHFileRequest.FamilyPath) } - + private int bitField0_; // required .RegionSpecifier region = 1; public static final int REGION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + /** + * required .RegionSpecifier region = 1; + */ public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { return region_; } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { return region_; } - + // repeated .BulkLoadHFileRequest.FamilyPath familyPath = 2; public static final int FAMILYPATH_FIELD_NUMBER = 2; private java.util.List familyPath_; + /** + * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 2; + */ public java.util.List getFamilyPathList() { return familyPath_; } + /** + * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 2; + */ public java.util.List getFamilyPathOrBuilderList() { return familyPath_; } + /** + * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 2; + */ public int getFamilyPathCount() { return familyPath_.size(); } + /** + * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index) { return familyPath_.get(index); } + /** + * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder( int index) { return familyPath_.get(index); } - + // optional bool assignSeqNum = 3; public static final int ASSIGNSEQNUM_FIELD_NUMBER = 3; private boolean assignSeqNum_; + /** + * optional bool assignSeqNum = 3; + */ public boolean hasAssignSeqNum() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional bool assignSeqNum = 3; + */ public boolean getAssignSeqNum() { return assignSeqNum_; } - + private void initFields() { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); familyPath_ = java.util.Collections.emptyList(); @@ -14612,7 +19314,7 @@ public final class ClientProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasRegion()) { memoizedIsInitialized = 0; return false; @@ -14630,7 +19332,7 @@ public final class ClientProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -14645,12 +19347,12 @@ public final class ClientProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -14668,14 +19370,14 @@ public final class ClientProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); 
} - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -14685,7 +19387,7 @@ public final class ClientProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest) obj; - + boolean result = true; result = result && (hasRegion() == other.hasRegion()); if (hasRegion()) { @@ -14703,9 +19405,13 @@ public final class ClientProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasRegion()) { @@ -14721,89 +19427,85 @@ public final class ClientProtos { hash = (53 * hash) + hashBoolean(getAssignSeqNum()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if 
(builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code BulkLoadHFileRequest} + * + *
+     * <pre>
+     **
+     * Atomically bulk load multiple HFiles (say from different column families)
+     * into an open region.
+     * </pre>
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequestOrBuilder { @@ -14811,18 +19513,21 @@ public final class ClientProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -14835,7 +19540,7 @@ public final class ClientProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (regionBuilder_ == null) { @@ -14854,20 +19559,20 @@ public final class ClientProtos { bitField0_ = (bitField0_ & ~0x00000004); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest build() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest result = buildPartial(); if (!result.isInitialized()) { @@ -14875,17 +19580,7 @@ public final class ClientProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest(this); int from_bitField0_ = bitField0_; @@ -14915,7 +19610,7 @@ public final class ClientProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest) { return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest)other); @@ -14924,7 +19619,7 @@ public final class ClientProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.getDefaultInstance()) return this; if (other.hasRegion()) { @@ -14962,7 +19657,7 @@ public final class ClientProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasRegion()) { @@ -14980,63 +19675,39 @@ public final class ClientProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegion(subBuilder.buildPartial()); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addFamilyPath(subBuilder.buildPartial()); - break; - } - case 24: { - bitField0_ |= 0x00000004; - assignSeqNum_ = input.readBool(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .RegionSpecifier region = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + /** + * required .RegionSpecifier region = 1; + */ public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { return region_; @@ -15044,6 +19715,9 @@ public final class ClientProtos { return 
regionBuilder_.getMessage(); } } + /** + * required .RegionSpecifier region = 1; + */ public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (value == null) { @@ -15057,6 +19731,9 @@ public final class ClientProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier region = 1; + */ public Builder setRegion( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { if (regionBuilder_ == null) { @@ -15068,6 +19745,9 @@ public final class ClientProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier region = 1; + */ public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -15084,6 +19764,9 @@ public final class ClientProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier region = 1; + */ public Builder clearRegion() { if (regionBuilder_ == null) { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); @@ -15094,11 +19777,17 @@ public final class ClientProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { bitField0_ |= 0x00000001; onChanged(); return getRegionFieldBuilder().getBuilder(); } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); @@ -15106,6 +19795,9 @@ public final class ClientProtos { return region_; } } + /** + * required .RegionSpecifier region = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { @@ -15119,7 +19811,7 @@ public final class ClientProtos { } return regionBuilder_; } - + // repeated .BulkLoadHFileRequest.FamilyPath familyPath = 2; private java.util.List familyPath_ = java.util.Collections.emptyList(); @@ -15129,10 +19821,13 @@ public final class ClientProtos { bitField0_ |= 0x00000002; } } - + private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder> familyPathBuilder_; - + + /** + * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 2; + */ public java.util.List getFamilyPathList() { if (familyPathBuilder_ == null) { return java.util.Collections.unmodifiableList(familyPath_); @@ -15140,6 +19835,9 @@ public final class ClientProtos { return familyPathBuilder_.getMessageList(); } } + /** + * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 2; + */ public int getFamilyPathCount() { if (familyPathBuilder_ == null) { return familyPath_.size(); @@ -15147,6 +19845,9 @@ public final class ClientProtos { return familyPathBuilder_.getCount(); } } + /** + * repeated .BulkLoadHFileRequest.FamilyPath 
familyPath = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index) { if (familyPathBuilder_ == null) { return familyPath_.get(index); @@ -15154,6 +19855,9 @@ public final class ClientProtos { return familyPathBuilder_.getMessage(index); } } + /** + * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 2; + */ public Builder setFamilyPath( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath value) { if (familyPathBuilder_ == null) { @@ -15168,6 +19872,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 2; + */ public Builder setFamilyPath( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) { if (familyPathBuilder_ == null) { @@ -15179,6 +19886,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 2; + */ public Builder addFamilyPath(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath value) { if (familyPathBuilder_ == null) { if (value == null) { @@ -15192,6 +19902,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 2; + */ public Builder addFamilyPath( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath value) { if (familyPathBuilder_ == null) { @@ -15206,6 +19919,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 2; + */ public Builder addFamilyPath( org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) { if (familyPathBuilder_ == null) { @@ -15217,6 +19933,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 2; + */ public Builder addFamilyPath( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) { if (familyPathBuilder_ == null) { @@ -15228,6 +19947,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 2; + */ public Builder addAllFamilyPath( java.lang.Iterable values) { if (familyPathBuilder_ == null) { @@ -15239,6 +19961,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 2; + */ public Builder clearFamilyPath() { if (familyPathBuilder_ == null) { familyPath_ = java.util.Collections.emptyList(); @@ -15249,6 +19974,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 2; + */ public Builder removeFamilyPath(int index) { if (familyPathBuilder_ == null) { ensureFamilyPathIsMutable(); @@ -15259,10 +19987,16 @@ public final class ClientProtos { } return this; } + /** + * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder getFamilyPathBuilder( int index) { return getFamilyPathFieldBuilder().getBuilder(index); } + /** + * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder( int index) { if 
(familyPathBuilder_ == null) { @@ -15270,6 +20004,9 @@ public final class ClientProtos { return familyPathBuilder_.getMessageOrBuilder(index); } } + /** + * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 2; + */ public java.util.List getFamilyPathOrBuilderList() { if (familyPathBuilder_ != null) { @@ -15278,15 +20015,24 @@ public final class ClientProtos { return java.util.Collections.unmodifiableList(familyPath_); } } + /** + * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder addFamilyPathBuilder() { return getFamilyPathFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance()); } + /** + * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder addFamilyPathBuilder( int index) { return getFamilyPathFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance()); } + /** + * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 2; + */ public java.util.List getFamilyPathBuilderList() { return getFamilyPathFieldBuilder().getBuilderList(); @@ -15305,85 +20051,176 @@ public final class ClientProtos { } return familyPathBuilder_; } - + // optional bool assignSeqNum = 3; private boolean assignSeqNum_ ; + /** + * optional bool assignSeqNum = 3; + */ public boolean hasAssignSeqNum() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional bool assignSeqNum = 3; + */ public boolean getAssignSeqNum() { return assignSeqNum_; } + /** + * optional bool assignSeqNum = 3; + */ public Builder setAssignSeqNum(boolean value) { bitField0_ |= 0x00000004; assignSeqNum_ = value; onChanged(); return this; } + /** + * optional bool assignSeqNum = 3; + */ public Builder clearAssignSeqNum() { bitField0_ = (bitField0_ & ~0x00000004); assignSeqNum_ = false; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:BulkLoadHFileRequest) } - + static { defaultInstance = new BulkLoadHFileRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:BulkLoadHFileRequest) } - + public interface BulkLoadHFileResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required bool loaded = 1; + /** + * required bool loaded = 1; + */ boolean hasLoaded(); + /** + * required bool loaded = 1; + */ boolean getLoaded(); } + /** + * Protobuf type {@code BulkLoadHFileResponse} + */ public static final class BulkLoadHFileResponse extends com.google.protobuf.GeneratedMessage implements BulkLoadHFileResponseOrBuilder { // Use BulkLoadHFileResponse.newBuilder() to construct. 
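
The Builder#mergeFrom(CodedInputStream, ExtensionRegistryLite) rewrite for BulkLoadHFileRequest earlier in this hunk is the pattern repeated for every message that follows: the hand-rolled tag/switch loop is replaced by one call to PARSER.parsePartialFrom(input, extensionRegistry), with the catch block salvaging e.getUnfinishedMessage() so that whatever was decoded before a failure is still merged in the finally block before the exception propagates. (Note the generic type parameters on Parser/AbstractParser appear stripped in this copy of the diff; protoc 2.5 emits Parser<BulkLoadHFileRequest> and the corresponding AbstractParser subtype here.) A minimal caller-side sketch of what the new PARSER constant gives client code; the wrapper class and its byte[] input are invented for illustration:

    import com.google.protobuf.InvalidProtocolBufferException;
    import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest;

    final class ParserSketch {
      // One shared, stateless parser per message type replaces the old
      // newBuilder().mergeFrom(wire).buildParsed() sequence deleted by this patch.
      static BulkLoadHFileRequest decode(byte[] wire)
          throws InvalidProtocolBufferException {
        return BulkLoadHFileRequest.PARSER.parseFrom(wire);
      }
    }
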
- private BulkLoadHFileResponse(Builder builder) { + private BulkLoadHFileResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private BulkLoadHFileResponse(boolean noInit) {} - + private BulkLoadHFileResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final BulkLoadHFileResponse defaultInstance; public static BulkLoadHFileResponse getDefaultInstance() { return defaultInstance; } - + public BulkLoadHFileResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private BulkLoadHFileResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + loaded_ = input.readBool(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.Builder.class); } - + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public BulkLoadHFileResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new BulkLoadHFileResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + private int bitField0_; // required bool loaded = 1; public static final int LOADED_FIELD_NUMBER = 1; private boolean loaded_; + /** + * required bool loaded = 1; + */ public boolean hasLoaded() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bool loaded = 1; + */ public boolean getLoaded() { return loaded_; } - + private void initFields() { loaded_ = false; } @@ -15391,7 +20228,7 @@ 
public final class ClientProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasLoaded()) { memoizedIsInitialized = 0; return false; @@ -15399,7 +20236,7 @@ public final class ClientProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -15408,12 +20245,12 @@ public final class ClientProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -15423,14 +20260,14 @@ public final class ClientProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -15440,7 +20277,7 @@ public final class ClientProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse) obj; - + boolean result = true; result = result && (hasLoaded() == other.hasLoaded()); if (hasLoaded()) { @@ -15451,9 +20288,13 @@ public final class ClientProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasLoaded()) { @@ -15461,89 +20302,79 @@ public final class ClientProtos { hash = (53 * hash) + hashBoolean(getLoaded()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(java.io.InputStream 
input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code BulkLoadHFileResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponseOrBuilder { @@ -15551,18 +20382,21 @@ public final class ClientProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.Builder.class); } - + // Construct using 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -15573,27 +20407,27 @@ public final class ClientProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); loaded_ = false; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse build() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse result = buildPartial(); if (!result.isInitialized()) { @@ -15601,17 +20435,7 @@ public final class ClientProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse(this); int from_bitField0_ = bitField0_; @@ -15624,7 +20448,7 @@ public final class ClientProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse)other); @@ -15633,7 +20457,7 @@ public final class ClientProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance()) return this; if (other.hasLoaded()) { @@ -15642,7 +20466,7 @@ public final class ClientProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasLoaded()) { @@ -15650,205 +20474,352 @@ public final class ClientProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - 
this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - loaded_ = input.readBool(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required bool loaded = 1; private boolean loaded_ ; + /** + * required bool loaded = 1; + */ public boolean hasLoaded() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bool loaded = 1; + */ public boolean getLoaded() { return loaded_; } + /** + * required bool loaded = 1; + */ public Builder setLoaded(boolean value) { bitField0_ |= 0x00000001; loaded_ = value; onChanged(); return this; } + /** + * required bool loaded = 1; + */ public Builder clearLoaded() { bitField0_ = (bitField0_ & ~0x00000001); loaded_ = false; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:BulkLoadHFileResponse) } - + static { defaultInstance = new BulkLoadHFileResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:BulkLoadHFileResponse) } - + public interface CoprocessorServiceCallOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required bytes row = 1; + /** + * required bytes row = 1; + */ boolean hasRow(); + /** + * required bytes row = 1; + */ com.google.protobuf.ByteString getRow(); - + // required string serviceName = 2; + /** + * required string serviceName = 2; + */ boolean hasServiceName(); - String getServiceName(); - + /** + * required string serviceName = 2; + */ + java.lang.String getServiceName(); + /** + * required string serviceName = 2; + */ + com.google.protobuf.ByteString + getServiceNameBytes(); + // required string methodName = 3; + /** + * required string methodName = 3; + */ boolean hasMethodName(); - String getMethodName(); - + /** + * required string methodName = 3; + */ + java.lang.String getMethodName(); + /** + * required string methodName = 3; + */ + com.google.protobuf.ByteString + getMethodNameBytes(); + // required bytes request = 4; + /** + * required bytes request = 4; + */ boolean hasRequest(); + /** + * required bytes request = 4; + */ com.google.protobuf.ByteString getRequest(); } + /** + * Protobuf type {@code CoprocessorServiceCall} + */ public static final class CoprocessorServiceCall extends com.google.protobuf.GeneratedMessage implements CoprocessorServiceCallOrBuilder { // Use CoprocessorServiceCall.newBuilder() to construct. 
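
Alongside the parser migration, the string fields on CoprocessorServiceCall now expose their backing UTF-8 bytes: getServiceNameBytes() and getMethodNameBytes() are promoted into the OrBuilder interface above, and the message caches the decoded java.lang.String once the bytes pass bs.isValidUtf8(). A short sketch of both views of the same field, with invented row/service/method values (all four fields are required, so build() enforces presence):

    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall;

    final class ServiceCallSketch {
      static CoprocessorServiceCall build() {
        CoprocessorServiceCall call = CoprocessorServiceCall.newBuilder()
            .setRow(ByteString.copyFromUtf8("r1"))
            .setServiceName("ExampleService")   // hypothetical service/method names
            .setMethodName("exampleMethod")
            .setRequest(ByteString.EMPTY)
            .build();
        // New accessor: the same field read back as raw UTF-8 bytes.
        ByteString svc = call.getServiceNameBytes();
        assert svc.toStringUtf8().equals(call.getServiceName());
        return call;
      }
    }
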
- private CoprocessorServiceCall(Builder builder) { + private CoprocessorServiceCall(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private CoprocessorServiceCall(boolean noInit) {} - + private CoprocessorServiceCall(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final CoprocessorServiceCall defaultInstance; public static CoprocessorServiceCall getDefaultInstance() { return defaultInstance; } - + public CoprocessorServiceCall getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private CoprocessorServiceCall( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + row_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + serviceName_ = input.readBytes(); + break; + } + case 26: { + bitField0_ |= 0x00000004; + methodName_ = input.readBytes(); + break; + } + case 34: { + bitField0_ |= 0x00000008; + request_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceCall_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceCall_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceCall_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public CoprocessorServiceCall parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CoprocessorServiceCall(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required bytes row = 1; public static final int ROW_FIELD_NUMBER = 1; private 
com.google.protobuf.ByteString row_; + /** + * required bytes row = 1; + */ public boolean hasRow() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes row = 1; + */ public com.google.protobuf.ByteString getRow() { return row_; } - + // required string serviceName = 2; public static final int SERVICENAME_FIELD_NUMBER = 2; private java.lang.Object serviceName_; + /** + * required string serviceName = 2; + */ public boolean hasServiceName() { return ((bitField0_ & 0x00000002) == 0x00000002); } - public String getServiceName() { + /** + * required string serviceName = 2; + */ + public java.lang.String getServiceName() { java.lang.Object ref = serviceName_; - if (ref instanceof String) { - return (String) ref; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { serviceName_ = s; } return s; } } - private com.google.protobuf.ByteString getServiceNameBytes() { + /** + * required string serviceName = 2; + */ + public com.google.protobuf.ByteString + getServiceNameBytes() { java.lang.Object ref = serviceName_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); serviceName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + // required string methodName = 3; public static final int METHODNAME_FIELD_NUMBER = 3; private java.lang.Object methodName_; + /** + * required string methodName = 3; + */ public boolean hasMethodName() { return ((bitField0_ & 0x00000004) == 0x00000004); } - public String getMethodName() { + /** + * required string methodName = 3; + */ + public java.lang.String getMethodName() { java.lang.Object ref = methodName_; - if (ref instanceof String) { - return (String) ref; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { methodName_ = s; } return s; } } - private com.google.protobuf.ByteString getMethodNameBytes() { + /** + * required string methodName = 3; + */ + public com.google.protobuf.ByteString + getMethodNameBytes() { java.lang.Object ref = methodName_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); methodName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + // required bytes request = 4; public static final int REQUEST_FIELD_NUMBER = 4; private com.google.protobuf.ByteString request_; + /** + * required bytes request = 4; + */ public boolean hasRequest() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * required bytes request = 4; + */ public com.google.protobuf.ByteString getRequest() { return request_; } - + private void initFields() { row_ = com.google.protobuf.ByteString.EMPTY; serviceName_ = ""; @@ -15859,7 +20830,7 @@ public final class ClientProtos { public final boolean 
isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasRow()) { memoizedIsInitialized = 0; return false; @@ -15879,7 +20850,7 @@ public final class ClientProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -15897,12 +20868,12 @@ public final class ClientProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -15924,14 +20895,14 @@ public final class ClientProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -15941,7 +20912,7 @@ public final class ClientProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall) obj; - + boolean result = true; result = result && (hasRow() == other.hasRow()); if (hasRow()) { @@ -15967,9 +20938,13 @@ public final class ClientProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasRow()) { @@ -15989,89 +20964,79 @@ public final class ClientProtos { hash = (53 * hash) + getRequest().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom(java.io.InputStream input) throws java.io.IOException { - return 
newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code CoprocessorServiceCall} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder { @@ -16079,18 +21044,21 @@ public final class ClientProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceCall_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceCall_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceCall_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.newBuilder() private Builder() { 
maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -16101,7 +21069,7 @@ public final class ClientProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); row_ = com.google.protobuf.ByteString.EMPTY; @@ -16114,20 +21082,20 @@ public final class ClientProtos { bitField0_ = (bitField0_ & ~0x00000008); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceCall_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall build() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall result = buildPartial(); if (!result.isInitialized()) { @@ -16135,17 +21103,7 @@ public final class ClientProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall(this); int from_bitField0_ = bitField0_; @@ -16170,7 +21128,7 @@ public final class ClientProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall)other); @@ -16179,17 +21137,21 @@ public final class ClientProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall other) { if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance()) return this; if (other.hasRow()) { setRow(other.getRow()); } if (other.hasServiceName()) { - setServiceName(other.getServiceName()); + bitField0_ |= 0x00000002; + serviceName_ = other.serviceName_; + onChanged(); } if (other.hasMethodName()) { - setMethodName(other.getMethodName()); + bitField0_ |= 0x00000004; + methodName_ = other.methodName_; + onChanged(); } if (other.hasRequest()) { setRequest(other.getRequest()); @@ -16197,7 +21159,7 @@ public final class ClientProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasRow()) { @@ -16217,64 +21179,43 @@ public final class ClientProtos { } return true; } - + 
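
One subtlety in the mergeFrom(CoprocessorServiceCall other) hunk above: string fields are no longer routed through setServiceName(other.getServiceName()), which would force the lazily decoded field into a java.lang.String; the builder now copies the backing object directly (serviceName_ = other.serviceName_), so a still-undecoded ByteString stays undecoded. Observable behaviour should be unchanged, as in this sketch (values invented):

    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall;

    final class MergeSketch {
      static void demo() {
        CoprocessorServiceCall base = CoprocessorServiceCall.newBuilder()
            .setRow(ByteString.copyFromUtf8("r1"))
            .setServiceName("Svc")
            .setMethodName("m")
            .setRequest(ByteString.EMPTY)
            .build();
        // Merging still copies the field contents, so equals() holds; the new
        // path just avoids eagerly decoding a ByteString-backed string field.
        CoprocessorServiceCall copy =
            CoprocessorServiceCall.newBuilder().mergeFrom(base).build();
        assert copy.equals(base);
      }
    }
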
public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - row_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - serviceName_ = input.readBytes(); - break; - } - case 26: { - bitField0_ |= 0x00000004; - methodName_ = input.readBytes(); - break; - } - case 34: { - bitField0_ |= 0x00000008; - request_ = input.readBytes(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required bytes row = 1; private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes row = 1; + */ public boolean hasRow() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes row = 1; + */ public com.google.protobuf.ByteString getRow() { return row_; } + /** + * required bytes row = 1; + */ public Builder setRow(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -16284,29 +21225,59 @@ public final class ClientProtos { onChanged(); return this; } + /** + * required bytes row = 1; + */ public Builder clearRow() { bitField0_ = (bitField0_ & ~0x00000001); row_ = getDefaultInstance().getRow(); onChanged(); return this; } - + // required string serviceName = 2; private java.lang.Object serviceName_ = ""; + /** + * required string serviceName = 2; + */ public boolean hasServiceName() { return ((bitField0_ & 0x00000002) == 0x00000002); } - public String getServiceName() { + /** + * required string serviceName = 2; + */ + public java.lang.String getServiceName() { java.lang.Object ref = serviceName_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); serviceName_ = s; return s; } else { - return (String) ref; + return (java.lang.String) ref; } } - public Builder setServiceName(String value) { + /** + * required string serviceName = 2; + */ + public com.google.protobuf.ByteString + getServiceNameBytes() { + java.lang.Object ref = serviceName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + serviceName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * required string serviceName = 2; + */ + public Builder setServiceName( + java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ 
-16315,34 +21286,72 @@ public final class ClientProtos { onChanged(); return this; } + /** + * required string serviceName = 2; + */ public Builder clearServiceName() { bitField0_ = (bitField0_ & ~0x00000002); serviceName_ = getDefaultInstance().getServiceName(); onChanged(); return this; } - void setServiceName(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000002; + /** + * required string serviceName = 2; + */ + public Builder setServiceNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; serviceName_ = value; onChanged(); + return this; } - + // required string methodName = 3; private java.lang.Object methodName_ = ""; + /** + * required string methodName = 3; + */ public boolean hasMethodName() { return ((bitField0_ & 0x00000004) == 0x00000004); } - public String getMethodName() { + /** + * required string methodName = 3; + */ + public java.lang.String getMethodName() { java.lang.Object ref = methodName_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); methodName_ = s; return s; } else { - return (String) ref; + return (java.lang.String) ref; } } - public Builder setMethodName(String value) { + /** + * required string methodName = 3; + */ + public com.google.protobuf.ByteString + getMethodNameBytes() { + java.lang.Object ref = methodName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + methodName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * required string methodName = 3; + */ + public Builder setMethodName( + java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ -16351,26 +21360,46 @@ public final class ClientProtos { onChanged(); return this; } + /** + * required string methodName = 3; + */ public Builder clearMethodName() { bitField0_ = (bitField0_ & ~0x00000004); methodName_ = getDefaultInstance().getMethodName(); onChanged(); return this; } - void setMethodName(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000004; + /** + * required string methodName = 3; + */ + public Builder setMethodNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; methodName_ = value; onChanged(); + return this; } - + // required bytes request = 4; private com.google.protobuf.ByteString request_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes request = 4; + */ public boolean hasRequest() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * required bytes request = 4; + */ public com.google.protobuf.ByteString getRequest() { return request_; } + /** + * required bytes request = 4; + */ public Builder setRequest(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -16380,92 +21409,219 @@ public final class ClientProtos { onChanged(); return this; } + /** + * required bytes request = 4; + */ public Builder clearRequest() { bitField0_ = (bitField0_ & ~0x00000008); request_ = getDefaultInstance().getRequest(); onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:CoprocessorServiceCall) } - + static { defaultInstance = new CoprocessorServiceCall(true); 
defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:CoprocessorServiceCall) } - + public interface CoprocessorServiceRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .RegionSpecifier region = 1; + /** + * required .RegionSpecifier region = 1; + */ boolean hasRegion(); + /** + * required .RegionSpecifier region = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + /** + * required .RegionSpecifier region = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - + // required .CoprocessorServiceCall call = 2; + /** + * required .CoprocessorServiceCall call = 2; + */ boolean hasCall(); + /** + * required .CoprocessorServiceCall call = 2; + */ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall getCall(); + /** + * required .CoprocessorServiceCall call = 2; + */ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder getCallOrBuilder(); } + /** + * Protobuf type {@code CoprocessorServiceRequest} + */ public static final class CoprocessorServiceRequest extends com.google.protobuf.GeneratedMessage implements CoprocessorServiceRequestOrBuilder { // Use CoprocessorServiceRequest.newBuilder() to construct. - private CoprocessorServiceRequest(Builder builder) { + private CoprocessorServiceRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private CoprocessorServiceRequest(boolean noInit) {} - + private CoprocessorServiceRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final CoprocessorServiceRequest defaultInstance; public static CoprocessorServiceRequest getDefaultInstance() { return defaultInstance; } - + public CoprocessorServiceRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private CoprocessorServiceRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = region_.toBuilder(); + } + region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(region_); + region_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder subBuilder = null; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + subBuilder = call_.toBuilder(); + } + call_ = 
input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(call_); + call_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000002; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public CoprocessorServiceRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CoprocessorServiceRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required .RegionSpecifier region = 1; public static final int REGION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + /** + * required .RegionSpecifier region = 1; + */ public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { return region_; } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { return region_; } - + // required .CoprocessorServiceCall call = 2; public static final int CALL_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall call_; + /** + * required .CoprocessorServiceCall call = 2; + */ public boolean hasCall() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required .CoprocessorServiceCall call = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall getCall() { return call_; } + /** + * required .CoprocessorServiceCall call = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder getCallOrBuilder() { return call_; } - + private void initFields() { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); call_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance(); @@ -16474,7 +21630,7 @@ public 
final class ClientProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasRegion()) { memoizedIsInitialized = 0; return false; @@ -16494,7 +21650,7 @@ public final class ClientProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -16506,12 +21662,12 @@ public final class ClientProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -16525,14 +21681,14 @@ public final class ClientProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -16542,7 +21698,7 @@ public final class ClientProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest) obj; - + boolean result = true; result = result && (hasRegion() == other.hasRegion()); if (hasRegion()) { @@ -16558,9 +21714,13 @@ public final class ClientProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasRegion()) { @@ -16572,89 +21732,79 @@ public final class ClientProtos { hash = (53 * hash) + getCall().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest 
parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code CoprocessorServiceRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequestOrBuilder { @@ -16662,18 +21812,21 @@ public final class ClientProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.Builder.class); } - + // Construct 
using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -16686,7 +21839,7 @@ public final class ClientProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (regionBuilder_ == null) { @@ -16703,20 +21856,20 @@ public final class ClientProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest build() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest result = buildPartial(); if (!result.isInitialized()) { @@ -16724,17 +21877,7 @@ public final class ClientProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest(this); int from_bitField0_ = bitField0_; @@ -16759,7 +21902,7 @@ public final class ClientProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)other); @@ -16768,7 +21911,7 @@ public final class ClientProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance()) return this; if (other.hasRegion()) { @@ -16780,7 +21923,7 @@ public final class ClientProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasRegion()) { @@ -16800,61 +21943,39 @@ public final class ClientProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - 
com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegion(subBuilder.buildPartial()); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.newBuilder(); - if (hasCall()) { - subBuilder.mergeFrom(getCall()); - } - input.readMessage(subBuilder, extensionRegistry); - setCall(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .RegionSpecifier region = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + /** + * required .RegionSpecifier region = 1; + */ public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { return region_; @@ -16862,6 +21983,9 @@ public final class ClientProtos { return regionBuilder_.getMessage(); } } + /** + * required .RegionSpecifier region = 1; + */ public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (value == null) { @@ -16875,6 +21999,9 @@ public final class ClientProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier region = 1; + */ public Builder setRegion( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { if (regionBuilder_ == null) { @@ -16886,6 +22013,9 @@ public final class ClientProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier region = 1; + */ public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -16902,6 +22032,9 @@ public final class ClientProtos { bitField0_ |= 0x00000001; return this; } + /** + 
* required .RegionSpecifier region = 1; + */ public Builder clearRegion() { if (regionBuilder_ == null) { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); @@ -16912,11 +22045,17 @@ public final class ClientProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { bitField0_ |= 0x00000001; onChanged(); return getRegionFieldBuilder().getBuilder(); } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); @@ -16924,6 +22063,9 @@ public final class ClientProtos { return region_; } } + /** + * required .RegionSpecifier region = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { @@ -16937,14 +22079,20 @@ public final class ClientProtos { } return regionBuilder_; } - + // required .CoprocessorServiceCall call = 2; private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall call_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder> callBuilder_; + /** + * required .CoprocessorServiceCall call = 2; + */ public boolean hasCall() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required .CoprocessorServiceCall call = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall getCall() { if (callBuilder_ == null) { return call_; @@ -16952,6 +22100,9 @@ public final class ClientProtos { return callBuilder_.getMessage(); } } + /** + * required .CoprocessorServiceCall call = 2; + */ public Builder setCall(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall value) { if (callBuilder_ == null) { if (value == null) { @@ -16965,6 +22116,9 @@ public final class ClientProtos { bitField0_ |= 0x00000002; return this; } + /** + * required .CoprocessorServiceCall call = 2; + */ public Builder setCall( org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder builderForValue) { if (callBuilder_ == null) { @@ -16976,6 +22130,9 @@ public final class ClientProtos { bitField0_ |= 0x00000002; return this; } + /** + * required .CoprocessorServiceCall call = 2; + */ public Builder mergeCall(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall value) { if (callBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && @@ -16992,6 +22149,9 @@ public final class ClientProtos { bitField0_ |= 0x00000002; return this; } + /** + * required .CoprocessorServiceCall call = 2; + */ public Builder clearCall() { if (callBuilder_ == null) { call_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance(); @@ -17002,11 
+22162,17 @@ public final class ClientProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } + /** + * required .CoprocessorServiceCall call = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder getCallBuilder() { bitField0_ |= 0x00000002; onChanged(); return getCallFieldBuilder().getBuilder(); } + /** + * required .CoprocessorServiceCall call = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder getCallOrBuilder() { if (callBuilder_ != null) { return callBuilder_.getMessageOrBuilder(); @@ -17014,6 +22180,9 @@ public final class ClientProtos { return call_; } } + /** + * required .CoprocessorServiceCall call = 2; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder> getCallFieldBuilder() { @@ -17027,86 +22196,210 @@ public final class ClientProtos { } return callBuilder_; } - + // @@protoc_insertion_point(builder_scope:CoprocessorServiceRequest) } - + static { defaultInstance = new CoprocessorServiceRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:CoprocessorServiceRequest) } - + public interface CoprocessorServiceResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .RegionSpecifier region = 1; + /** + * required .RegionSpecifier region = 1; + */ boolean hasRegion(); + /** + * required .RegionSpecifier region = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + /** + * required .RegionSpecifier region = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - + // required .NameBytesPair value = 2; + /** + * required .NameBytesPair value = 2; + */ boolean hasValue(); + /** + * required .NameBytesPair value = 2; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue(); + /** + * required .NameBytesPair value = 2; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder(); } + /** + * Protobuf type {@code CoprocessorServiceResponse} + */ public static final class CoprocessorServiceResponse extends com.google.protobuf.GeneratedMessage implements CoprocessorServiceResponseOrBuilder { // Use CoprocessorServiceResponse.newBuilder() to construct. 
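// ---------------------------------------------------------------------------
// [Editorial sketch; not part of the regenerated file.] The CoprocessorServiceRequest
// hunks above show the protobuf 2.5 pattern this diff repeats for every message:
// a stream-reading constructor, a static PARSER that wraps it, static
// parseFrom()/parseDelimitedFrom() overloads that delegate to PARSER instead of
// newBuilder().mergeFrom(...).buildParsed(), and a memoized hashCode(). A minimal
// caller-side sketch of the new parse path; the class, method, and variable names
// here are illustrative, only the generated API calls are real:

import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest;

final class ParsePathSketch {
  /**
   * Parses a serialized CoprocessorServiceRequest. The removed code allocated
   * a Builder per call and finished with buildParsed(); after this diff the
   * static parseFrom() delegates to the shared PARSER, which builds the
   * message directly from the bytes.
   */
  static CoprocessorServiceRequest parse(byte[] data)
      throws InvalidProtocolBufferException {
    return CoprocessorServiceRequest.parseFrom(data);
  }
}
// ---------------------------------------------------------------------------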
- private CoprocessorServiceResponse(Builder builder) { + private CoprocessorServiceResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private CoprocessorServiceResponse(boolean noInit) {} - + private CoprocessorServiceResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final CoprocessorServiceResponse defaultInstance; public static CoprocessorServiceResponse getDefaultInstance() { return defaultInstance; } - + public CoprocessorServiceResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private CoprocessorServiceResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = region_.toBuilder(); + } + region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(region_); + region_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = null; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + subBuilder = value_.toBuilder(); + } + value_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(value_); + value_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000002; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.class, 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public CoprocessorServiceResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CoprocessorServiceResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required .RegionSpecifier region = 1; public static final int REGION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + /** + * required .RegionSpecifier region = 1; + */ public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { return region_; } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { return region_; } - + // required .NameBytesPair value = 2; public static final int VALUE_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value_; + /** + * required .NameBytesPair value = 2; + */ public boolean hasValue() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required .NameBytesPair value = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue() { return value_; } + /** + * required .NameBytesPair value = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder() { return value_; } - + private void initFields() { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); @@ -17115,7 +22408,7 @@ public final class ClientProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasRegion()) { memoizedIsInitialized = 0; return false; @@ -17135,7 +22428,7 @@ public final class ClientProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -17147,12 +22440,12 @@ public final class ClientProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -17166,14 +22459,14 @@ public final class ClientProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -17183,7 +22476,7 @@ public final class ClientProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse other = 
(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse) obj; - + boolean result = true; result = result && (hasRegion() == other.hasRegion()); if (hasRegion()) { @@ -17199,9 +22492,13 @@ public final class ClientProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasRegion()) { @@ -17213,89 +22510,79 @@ public final class ClientProtos { hash = (53 * hash) + getValue().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code CoprocessorServiceResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrBuilder { @@ -17303,18 +22590,21 @@ public final class ClientProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -17327,7 +22617,7 @@ public final class ClientProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (regionBuilder_ == null) { @@ -17344,20 +22634,20 @@ public final class ClientProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(); } - + public 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse build() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse result = buildPartial(); if (!result.isInitialized()) { @@ -17365,17 +22655,7 @@ public final class ClientProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse(this); int from_bitField0_ = bitField0_; @@ -17400,7 +22680,7 @@ public final class ClientProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse)other); @@ -17409,7 +22689,7 @@ public final class ClientProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance()) return this; if (other.hasRegion()) { @@ -17421,7 +22701,7 @@ public final class ClientProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasRegion()) { @@ -17441,61 +22721,39 @@ public final class ClientProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegion(subBuilder.buildPartial()); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(); - if (hasValue()) { - subBuilder.mergeFrom(getValue()); - } - input.readMessage(subBuilder, extensionRegistry); - setValue(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parsedMessage = null; + try { + parsedMessage = 
PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .RegionSpecifier region = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + /** + * required .RegionSpecifier region = 1; + */ public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { return region_; @@ -17503,6 +22761,9 @@ public final class ClientProtos { return regionBuilder_.getMessage(); } } + /** + * required .RegionSpecifier region = 1; + */ public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (value == null) { @@ -17516,6 +22777,9 @@ public final class ClientProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier region = 1; + */ public Builder setRegion( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { if (regionBuilder_ == null) { @@ -17527,6 +22791,9 @@ public final class ClientProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier region = 1; + */ public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -17543,6 +22810,9 @@ public final class ClientProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier region = 1; + */ public Builder clearRegion() { if (regionBuilder_ == null) { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); @@ -17553,11 +22823,17 @@ public final class ClientProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { bitField0_ |= 0x00000001; onChanged(); return getRegionFieldBuilder().getBuilder(); } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); @@ -17565,6 +22841,9 @@ public final class ClientProtos { return region_; } } + /** + * required .RegionSpecifier region = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { @@ -17578,14 +22857,20 
@@ public final class ClientProtos { } return regionBuilder_; } - + // required .NameBytesPair value = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> valueBuilder_; + /** + * required .NameBytesPair value = 2; + */ public boolean hasValue() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required .NameBytesPair value = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue() { if (valueBuilder_ == null) { return value_; @@ -17593,6 +22878,9 @@ public final class ClientProtos { return valueBuilder_.getMessage(); } } + /** + * required .NameBytesPair value = 2; + */ public Builder setValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { if (valueBuilder_ == null) { if (value == null) { @@ -17606,6 +22894,9 @@ public final class ClientProtos { bitField0_ |= 0x00000002; return this; } + /** + * required .NameBytesPair value = 2; + */ public Builder setValue( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { if (valueBuilder_ == null) { @@ -17617,6 +22908,9 @@ public final class ClientProtos { bitField0_ |= 0x00000002; return this; } + /** + * required .NameBytesPair value = 2; + */ public Builder mergeValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { if (valueBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && @@ -17633,6 +22927,9 @@ public final class ClientProtos { bitField0_ |= 0x00000002; return this; } + /** + * required .NameBytesPair value = 2; + */ public Builder clearValue() { if (valueBuilder_ == null) { value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); @@ -17643,11 +22940,17 @@ public final class ClientProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } + /** + * required .NameBytesPair value = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getValueBuilder() { bitField0_ |= 0x00000002; onChanged(); return getValueFieldBuilder().getBuilder(); } + /** + * required .NameBytesPair value = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder() { if (valueBuilder_ != null) { return valueBuilder_.getMessageOrBuilder(); @@ -17655,6 +22958,9 @@ public final class ClientProtos { return value_; } } + /** + * required .NameBytesPair value = 2; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> getValueFieldBuilder() { @@ -17668,86 +22974,216 @@ public final class ClientProtos { } return valueBuilder_; } - + // @@protoc_insertion_point(builder_scope:CoprocessorServiceResponse) } - + static { defaultInstance = new CoprocessorServiceResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:CoprocessorServiceResponse) } - + public interface MultiActionOrBuilder extends 
com.google.protobuf.MessageOrBuilder { - + // optional .MutationProto mutation = 1; + /** + * optional .MutationProto mutation = 1; + */ boolean hasMutation(); + /** + * optional .MutationProto mutation = 1; + */ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutation(); + /** + * optional .MutationProto mutation = 1; + */ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationOrBuilder(); - + // optional .Get get = 2; + /** + * optional .Get get = 2; + */ boolean hasGet(); + /** + * optional .Get get = 2; + */ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet(); + /** + * optional .Get get = 2; + */ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder(); } + /** + * Protobuf type {@code MultiAction} + * + *
+   * <pre>
+   **
+   * An action that is part of MultiRequest.
+   * This is a union type - exactly one of the fields will be set.
+   * </pre>
+ */ public static final class MultiAction extends com.google.protobuf.GeneratedMessage implements MultiActionOrBuilder { // Use MultiAction.newBuilder() to construct. - private MultiAction(Builder builder) { + private MultiAction(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private MultiAction(boolean noInit) {} - + private MultiAction(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final MultiAction defaultInstance; public static MultiAction getDefaultInstance() { return defaultInstance; } - + public MultiAction getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private MultiAction( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = mutation_.toBuilder(); + } + mutation_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(mutation_); + mutation_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder subBuilder = null; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + subBuilder = get_.toBuilder(); + } + get_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(get_); + get_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000002; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiAction_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiAction_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiAction_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.Builder.class); + } + + public static 
com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public MultiAction parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new MultiAction(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // optional .MutationProto mutation = 1; public static final int MUTATION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto mutation_; + /** + * optional .MutationProto mutation = 1; + */ public boolean hasMutation() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional .MutationProto mutation = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutation() { return mutation_; } + /** + * optional .MutationProto mutation = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationOrBuilder() { return mutation_; } - + // optional .Get get = 2; public static final int GET_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get get_; + /** + * optional .Get get = 2; + */ public boolean hasGet() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional .Get get = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet() { return get_; } + /** + * optional .Get get = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder() { return get_; } - + private void initFields() { mutation_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance(); get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance(); @@ -17756,7 +23192,7 @@ public final class ClientProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (hasMutation()) { if (!getMutation().isInitialized()) { memoizedIsInitialized = 0; @@ -17772,7 +23208,7 @@ public final class ClientProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -17784,12 +23220,12 @@ public final class ClientProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -17803,14 +23239,14 @@ public final class ClientProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -17820,7 +23256,7 @@ public final class ClientProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction) obj; - + boolean result = true; result = result && (hasMutation() == other.hasMutation()); if (hasMutation()) { @@ -17836,9 +23272,13 @@ public final class 
ClientProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasMutation()) { @@ -17850,89 +23290,85 @@ public final class ClientProtos { hash = (53 * hash) + getGet().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction parseFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code MultiAction} + * + *
+     * <pre>
+     **
+     * An action that is part of MultiRequest.
+     * This is a union type - exactly one of the fields will be set.
+     * </pre>
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiActionOrBuilder { @@ -17940,18 +23376,21 @@ public final class ClientProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiAction_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiAction_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiAction_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -17964,7 +23403,7 @@ public final class ClientProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (mutationBuilder_ == null) { @@ -17981,20 +23420,20 @@ public final class ClientProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiAction_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction build() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction result = buildPartial(); if (!result.isInitialized()) { @@ -18002,17 +23441,7 @@ public final class ClientProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction(this); int from_bitField0_ = bitField0_; @@ -18037,7 +23466,7 @@ public final class ClientProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction)other); @@ -18046,7 +23475,7 @@ public final class ClientProtos { return this; } } - + public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction other) { if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.getDefaultInstance()) return this; if (other.hasMutation()) { @@ -18058,7 +23487,7 @@ public final class ClientProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (hasMutation()) { if (!getMutation().isInitialized()) { @@ -18074,61 +23503,39 @@ public final class ClientProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.newBuilder(); - if (hasMutation()) { - subBuilder.mergeFrom(getMutation()); - } - input.readMessage(subBuilder, extensionRegistry); - setMutation(subBuilder.buildPartial()); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.newBuilder(); - if (hasGet()) { - subBuilder.mergeFrom(getGet()); - } - input.readMessage(subBuilder, extensionRegistry); - setGet(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // optional .MutationProto mutation = 1; private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto mutation_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder> mutationBuilder_; + /** + * optional .MutationProto mutation = 1; + */ public boolean hasMutation() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional .MutationProto mutation = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutation() { if (mutationBuilder_ == null) { return mutation_; @@ -18136,6 +23543,9 @@ public final class ClientProtos { return mutationBuilder_.getMessage(); } } + /** + * optional .MutationProto mutation = 1; + */ public Builder setMutation(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto value) { if (mutationBuilder_ == null) { if (value == null) { @@ -18149,6 +23559,9 @@ public final class 
ClientProtos { bitField0_ |= 0x00000001; return this; } + /** + * optional .MutationProto mutation = 1; + */ public Builder setMutation( org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder builderForValue) { if (mutationBuilder_ == null) { @@ -18160,6 +23573,9 @@ public final class ClientProtos { bitField0_ |= 0x00000001; return this; } + /** + * optional .MutationProto mutation = 1; + */ public Builder mergeMutation(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto value) { if (mutationBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -18176,6 +23592,9 @@ public final class ClientProtos { bitField0_ |= 0x00000001; return this; } + /** + * optional .MutationProto mutation = 1; + */ public Builder clearMutation() { if (mutationBuilder_ == null) { mutation_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance(); @@ -18186,11 +23605,17 @@ public final class ClientProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * optional .MutationProto mutation = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder getMutationBuilder() { bitField0_ |= 0x00000001; onChanged(); return getMutationFieldBuilder().getBuilder(); } + /** + * optional .MutationProto mutation = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationOrBuilder() { if (mutationBuilder_ != null) { return mutationBuilder_.getMessageOrBuilder(); @@ -18198,6 +23623,9 @@ public final class ClientProtos { return mutation_; } } + /** + * optional .MutationProto mutation = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder> getMutationFieldBuilder() { @@ -18211,14 +23639,20 @@ public final class ClientProtos { } return mutationBuilder_; } - + // optional .Get get = 2; private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder> getBuilder_; + /** + * optional .Get get = 2; + */ public boolean hasGet() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional .Get get = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet() { if (getBuilder_ == null) { return get_; @@ -18226,6 +23660,9 @@ public final class ClientProtos { return getBuilder_.getMessage(); } } + /** + * optional .Get get = 2; + */ public Builder setGet(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get value) { if (getBuilder_ == null) { if (value == null) { @@ -18239,6 +23676,9 @@ public final class ClientProtos { bitField0_ |= 0x00000002; return this; } + /** + * optional .Get get = 2; + */ public Builder setGet( org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder builderForValue) { if (getBuilder_ == null) { @@ -18250,6 +23690,9 @@ public final class ClientProtos { bitField0_ |= 0x00000002; return this; } + /** + * optional .Get get = 2; + */ public Builder mergeGet(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get 
value) { if (getBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && @@ -18266,6 +23709,9 @@ public final class ClientProtos { bitField0_ |= 0x00000002; return this; } + /** + * optional .Get get = 2; + */ public Builder clearGet() { if (getBuilder_ == null) { get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance(); @@ -18276,11 +23722,17 @@ public final class ClientProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } + /** + * optional .Get get = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder getGetBuilder() { bitField0_ |= 0x00000002; onChanged(); return getGetFieldBuilder().getBuilder(); } + /** + * optional .Get get = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder() { if (getBuilder_ != null) { return getBuilder_.getMessageOrBuilder(); @@ -18288,6 +23740,9 @@ public final class ClientProtos { return get_; } } + /** + * optional .Get get = 2; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder> getGetFieldBuilder() { @@ -18301,86 +23756,220 @@ public final class ClientProtos { } return getBuilder_; } - + // @@protoc_insertion_point(builder_scope:MultiAction) } - + static { defaultInstance = new MultiAction(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:MultiAction) } - + public interface ActionResultOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // optional .Result value = 1; + /** + * optional .Result value = 1; + */ boolean hasValue(); + /** + * optional .Result value = 1; + */ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getValue(); + /** + * optional .Result value = 1; + */ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getValueOrBuilder(); - + // optional .NameBytesPair exception = 2; + /** + * optional .NameBytesPair exception = 2; + */ boolean hasException(); + /** + * optional .NameBytesPair exception = 2; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getException(); + /** + * optional .NameBytesPair exception = 2; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder(); } + /** + * Protobuf type {@code ActionResult} + * + *
+   **
+   * An individual action result. The results will be in the
+   * same order as the actions in the request. If an action
+   * returns a value, it is set in the value field. If it doesn't
+   * return anything, the result will be empty. If an action
+   * fails to execute due to any exception, the exception
+   * is returned as a stringified parameter.
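+   *
+   * Illustrative sketch only, not part of the protoc output: a client
+   * consuming an ActionResult would typically branch on the exception
+   * field first (handleError and process are assumed helpers):
+   *
+   *   if (result.hasException()) {
+   *     handleError(result.getException());
+   *   } else if (result.hasValue()) {
+   *     process(result.getValue());
+   *   }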
+   * </pre>
+ */ public static final class ActionResult extends com.google.protobuf.GeneratedMessage implements ActionResultOrBuilder { // Use ActionResult.newBuilder() to construct. - private ActionResult(Builder builder) { + private ActionResult(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private ActionResult(boolean noInit) {} - + private ActionResult(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final ActionResult defaultInstance; public static ActionResult getDefaultInstance() { return defaultInstance; } - + public ActionResult getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ActionResult( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = value_.toBuilder(); + } + value_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(value_); + value_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = null; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + subBuilder = exception_.toBuilder(); + } + exception_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(exception_); + exception_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000002; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ActionResult_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ActionResult_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ActionResult_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.class, 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public ActionResult parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ActionResult(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // optional .Result value = 1; public static final int VALUE_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value_; + /** + * optional .Result value = 1; + */ public boolean hasValue() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional .Result value = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getValue() { return value_; } + /** + * optional .Result value = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getValueOrBuilder() { return value_; } - + // optional .NameBytesPair exception = 2; public static final int EXCEPTION_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair exception_; + /** + * optional .NameBytesPair exception = 2; + */ public boolean hasException() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional .NameBytesPair exception = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getException() { return exception_; } + /** + * optional .NameBytesPair exception = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder() { return exception_; } - + private void initFields() { value_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); @@ -18389,7 +23978,7 @@ public final class ClientProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (hasException()) { if (!getException().isInitialized()) { memoizedIsInitialized = 0; @@ -18399,7 +23988,7 @@ public final class ClientProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -18411,12 +24000,12 @@ public final class ClientProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -18430,14 +24019,14 @@ public final class ClientProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -18447,7 +24036,7 @@ public final class ClientProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult other = 
(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult) obj; - + boolean result = true; result = result && (hasValue() == other.hasValue()); if (hasValue()) { @@ -18463,9 +24052,13 @@ public final class ClientProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasValue()) { @@ -18477,89 +24070,89 @@ public final class ClientProtos { hash = (53 * hash) + getException().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - 
return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code ActionResult} + * + *
+     **
+     * An individual action result. The results will be in the
+     * same order as the actions in the request. If an action
+     * returns a value, it is set in the value field. If it doesn't
+     * return anything, the result will be empty. If an action
+     * fails to execute due to any exception, the exception
+     * is returned as a stringified parameter.
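+     *
+     * Illustrative sketch only, not part of the protoc output: with this
+     * patch, callers parse serialized bytes through the static PARSER
+     * (or the parseFrom overloads that now delegate to it) instead of
+     * the removed newBuilder().mergeFrom(bytes).buildParsed() path:
+     *
+     *   ActionResult parsed = ActionResult.PARSER.parseFrom(bytes);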
+     * </pre>
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResultOrBuilder { @@ -18567,18 +24160,21 @@ public final class ClientProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ActionResult_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ActionResult_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ActionResult_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -18591,7 +24187,7 @@ public final class ClientProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (valueBuilder_ == null) { @@ -18608,20 +24204,20 @@ public final class ClientProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ActionResult_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult build() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult result = buildPartial(); if (!result.isInitialized()) { @@ -18629,17 +24225,7 @@ public final class ClientProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult(this); int from_bitField0_ = bitField0_; @@ -18664,7 +24250,7 @@ public final class ClientProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult)other); @@ -18673,7 +24259,7 @@ public final class ClientProtos { return this; } } - + public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult other) { if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.getDefaultInstance()) return this; if (other.hasValue()) { @@ -18685,7 +24271,7 @@ public final class ClientProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (hasException()) { if (!getException().isInitialized()) { @@ -18695,61 +24281,39 @@ public final class ClientProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder(); - if (hasValue()) { - subBuilder.mergeFrom(getValue()); - } - input.readMessage(subBuilder, extensionRegistry); - setValue(subBuilder.buildPartial()); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(); - if (hasException()) { - subBuilder.mergeFrom(getException()); - } - input.readMessage(subBuilder, extensionRegistry); - setException(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // optional .Result value = 1; private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> valueBuilder_; + /** + * optional .Result value = 1; + */ public boolean hasValue() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional .Result value = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getValue() { if (valueBuilder_ == null) { return value_; @@ -18757,6 +24321,9 @@ public final class ClientProtos { return valueBuilder_.getMessage(); } } + /** + * optional .Result value = 1; + */ public Builder setValue(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) { if (valueBuilder_ == null) { if (value == null) { @@ -18770,6 +24337,9 @@ public final class ClientProtos { bitField0_ |= 0x00000001; return this; } + /** + * optional .Result value = 1; + */ 
public Builder setValue( org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) { if (valueBuilder_ == null) { @@ -18781,6 +24351,9 @@ public final class ClientProtos { bitField0_ |= 0x00000001; return this; } + /** + * optional .Result value = 1; + */ public Builder mergeValue(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) { if (valueBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -18797,6 +24370,9 @@ public final class ClientProtos { bitField0_ |= 0x00000001; return this; } + /** + * optional .Result value = 1; + */ public Builder clearValue() { if (valueBuilder_ == null) { value_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); @@ -18807,11 +24383,17 @@ public final class ClientProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * optional .Result value = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder getValueBuilder() { bitField0_ |= 0x00000001; onChanged(); return getValueFieldBuilder().getBuilder(); } + /** + * optional .Result value = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getValueOrBuilder() { if (valueBuilder_ != null) { return valueBuilder_.getMessageOrBuilder(); @@ -18819,6 +24401,9 @@ public final class ClientProtos { return value_; } } + /** + * optional .Result value = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> getValueFieldBuilder() { @@ -18832,14 +24417,20 @@ public final class ClientProtos { } return valueBuilder_; } - + // optional .NameBytesPair exception = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> exceptionBuilder_; + /** + * optional .NameBytesPair exception = 2; + */ public boolean hasException() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional .NameBytesPair exception = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getException() { if (exceptionBuilder_ == null) { return exception_; @@ -18847,6 +24438,9 @@ public final class ClientProtos { return exceptionBuilder_.getMessage(); } } + /** + * optional .NameBytesPair exception = 2; + */ public Builder setException(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { if (exceptionBuilder_ == null) { if (value == null) { @@ -18860,6 +24454,9 @@ public final class ClientProtos { bitField0_ |= 0x00000002; return this; } + /** + * optional .NameBytesPair exception = 2; + */ public Builder setException( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { if (exceptionBuilder_ == null) { @@ -18871,6 +24468,9 @@ public final class ClientProtos { bitField0_ |= 0x00000002; return this; } + /** + * optional .NameBytesPair exception = 2; + */ public Builder mergeException(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { if 
(exceptionBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && @@ -18887,6 +24487,9 @@ public final class ClientProtos { bitField0_ |= 0x00000002; return this; } + /** + * optional .NameBytesPair exception = 2; + */ public Builder clearException() { if (exceptionBuilder_ == null) { exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); @@ -18897,11 +24500,17 @@ public final class ClientProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } + /** + * optional .NameBytesPair exception = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getExceptionBuilder() { bitField0_ |= 0x00000002; onChanged(); return getExceptionFieldBuilder().getBuilder(); } + /** + * optional .NameBytesPair exception = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder() { if (exceptionBuilder_ != null) { return exceptionBuilder_.getMessageOrBuilder(); @@ -18909,6 +24518,9 @@ public final class ClientProtos { return exception_; } } + /** + * optional .NameBytesPair exception = 2; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> getExceptionFieldBuilder() { @@ -18922,113 +24534,273 @@ public final class ClientProtos { } return exceptionBuilder_; } - + // @@protoc_insertion_point(builder_scope:ActionResult) } - + static { defaultInstance = new ActionResult(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:ActionResult) } - + public interface MultiRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .RegionSpecifier region = 1; + /** + * required .RegionSpecifier region = 1; + */ boolean hasRegion(); + /** + * required .RegionSpecifier region = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + /** + * required .RegionSpecifier region = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - + // repeated .MultiAction action = 2; + /** + * repeated .MultiAction action = 2; + */ java.util.List getActionList(); + /** + * repeated .MultiAction action = 2; + */ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction getAction(int index); + /** + * repeated .MultiAction action = 2; + */ int getActionCount(); + /** + * repeated .MultiAction action = 2; + */ java.util.List getActionOrBuilderList(); + /** + * repeated .MultiAction action = 2; + */ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiActionOrBuilder getActionOrBuilder( int index); - + // optional bool atomic = 3; + /** + * optional bool atomic = 3; + */ boolean hasAtomic(); + /** + * optional bool atomic = 3; + */ boolean getAtomic(); } + /** + * Protobuf type {@code MultiRequest} + * + *
+   **
+   * You can execute a list of actions on a given region in order.
+   *
+   * If it is a list of mutate actions, atomic can be set
+   * to ensure they are processed atomically, just like
+   * RowMutations.
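+   *
+   * Illustrative sketch only, not part of the protoc output: an atomic
+   * batch over a region (regionSpecifier, action1 and action2 are
+   * assumed, previously-built messages):
+   *
+   *   MultiRequest request = MultiRequest.newBuilder()
+   *       .setRegion(regionSpecifier)
+   *       .addAction(action1)
+   *       .addAction(action2)
+   *       .setAtomic(true)
+   *       .build();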
+   * </pre>
+ */ public static final class MultiRequest extends com.google.protobuf.GeneratedMessage implements MultiRequestOrBuilder { // Use MultiRequest.newBuilder() to construct. - private MultiRequest(Builder builder) { + private MultiRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private MultiRequest(boolean noInit) {} - + private MultiRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final MultiRequest defaultInstance; public static MultiRequest getDefaultInstance() { return defaultInstance; } - + public MultiRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private MultiRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = region_.toBuilder(); + } + region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(region_); + region_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + case 18: { + if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + action_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000002; + } + action_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.PARSER, extensionRegistry)); + break; + } + case 24: { + bitField0_ |= 0x00000002; + atomic_ = input.readBool(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + action_ = java.util.Collections.unmodifiableList(action_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.class, 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public MultiRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new MultiRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required .RegionSpecifier region = 1; public static final int REGION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + /** + * required .RegionSpecifier region = 1; + */ public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { return region_; } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { return region_; } - + // repeated .MultiAction action = 2; public static final int ACTION_FIELD_NUMBER = 2; private java.util.List action_; + /** + * repeated .MultiAction action = 2; + */ public java.util.List getActionList() { return action_; } + /** + * repeated .MultiAction action = 2; + */ public java.util.List getActionOrBuilderList() { return action_; } + /** + * repeated .MultiAction action = 2; + */ public int getActionCount() { return action_.size(); } + /** + * repeated .MultiAction action = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction getAction(int index) { return action_.get(index); } + /** + * repeated .MultiAction action = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiActionOrBuilder getActionOrBuilder( int index) { return action_.get(index); } - + // optional bool atomic = 3; public static final int ATOMIC_FIELD_NUMBER = 3; private boolean atomic_; + /** + * optional bool atomic = 3; + */ public boolean hasAtomic() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional bool atomic = 3; + */ public boolean getAtomic() { return atomic_; } - + private void initFields() { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); action_ = java.util.Collections.emptyList(); @@ -19038,7 +24810,7 @@ public final class ClientProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasRegion()) { memoizedIsInitialized = 0; return false; @@ -19056,7 +24828,7 @@ public final class ClientProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -19071,12 +24843,12 @@ public final class ClientProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -19094,14 +24866,14 @@ public final class ClientProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; 
@java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -19111,7 +24883,7 @@ public final class ClientProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest) obj; - + boolean result = true; result = result && (hasRegion() == other.hasRegion()); if (hasRegion()) { @@ -19129,9 +24901,13 @@ public final class ClientProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasRegion()) { @@ -19147,89 +24923,88 @@ public final class ClientProtos { hash = (53 * hash) + hashBoolean(getAtomic()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - 
Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code MultiRequest} + * + *
+     **
+     * You can execute a list of actions on a given region in order.
+     *
+     * If it is a list of mutate actions, atomic can be set
+     * to ensure they are processed atomically, just like
+     * RowMutations.
+     * </pre>
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequestOrBuilder { @@ -19237,18 +25012,21 @@ public final class ClientProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -19261,7 +25039,7 @@ public final class ClientProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (regionBuilder_ == null) { @@ -19280,20 +25058,20 @@ public final class ClientProtos { bitField0_ = (bitField0_ & ~0x00000004); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest build() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest result = buildPartial(); if (!result.isInitialized()) { @@ -19301,17 +25079,7 @@ public final class ClientProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest(this); int from_bitField0_ = bitField0_; @@ -19341,7 +25109,7 @@ public final class ClientProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest)other); @@ -19350,7 +25118,7 @@ public final class ClientProtos { return this; } } - + public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance()) return this; if (other.hasRegion()) { @@ -19388,7 +25156,7 @@ public final class ClientProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasRegion()) { @@ -19406,63 +25174,39 @@ public final class ClientProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegion(subBuilder.buildPartial()); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addAction(subBuilder.buildPartial()); - break; - } - case 24: { - bitField0_ |= 0x00000004; - atomic_ = input.readBool(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .RegionSpecifier region = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + /** + * required .RegionSpecifier region = 1; + */ public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { return region_; @@ -19470,6 +25214,9 @@ public final class ClientProtos { return regionBuilder_.getMessage(); } } + /** + * required .RegionSpecifier region = 1; + */ public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (value == null) { @@ -19483,6 +25230,9 @@ public final class 
ClientProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier region = 1; + */ public Builder setRegion( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { if (regionBuilder_ == null) { @@ -19494,6 +25244,9 @@ public final class ClientProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier region = 1; + */ public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -19510,6 +25263,9 @@ public final class ClientProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier region = 1; + */ public Builder clearRegion() { if (regionBuilder_ == null) { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); @@ -19520,11 +25276,17 @@ public final class ClientProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { bitField0_ |= 0x00000001; onChanged(); return getRegionFieldBuilder().getBuilder(); } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); @@ -19532,6 +25294,9 @@ public final class ClientProtos { return region_; } } + /** + * required .RegionSpecifier region = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { @@ -19545,7 +25310,7 @@ public final class ClientProtos { } return regionBuilder_; } - + // repeated .MultiAction action = 2; private java.util.List action_ = java.util.Collections.emptyList(); @@ -19555,10 +25320,13 @@ public final class ClientProtos { bitField0_ |= 0x00000002; } } - + private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiActionOrBuilder> actionBuilder_; - + + /** + * repeated .MultiAction action = 2; + */ public java.util.List getActionList() { if (actionBuilder_ == null) { return java.util.Collections.unmodifiableList(action_); @@ -19566,6 +25334,9 @@ public final class ClientProtos { return actionBuilder_.getMessageList(); } } + /** + * repeated .MultiAction action = 2; + */ public int getActionCount() { if (actionBuilder_ == null) { return action_.size(); @@ -19573,6 +25344,9 @@ public final class ClientProtos { return actionBuilder_.getCount(); } } + /** + * repeated .MultiAction action = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction getAction(int index) { if (actionBuilder_ == null) { return action_.get(index); @@ -19580,6 +25354,9 @@ public final class ClientProtos { return actionBuilder_.getMessage(index); } } + /** + * repeated .MultiAction action = 2; + */ public Builder setAction( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction value) { if (actionBuilder_ == null) { @@ -19594,6 
+25371,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .MultiAction action = 2; + */ public Builder setAction( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.Builder builderForValue) { if (actionBuilder_ == null) { @@ -19605,6 +25385,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .MultiAction action = 2; + */ public Builder addAction(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction value) { if (actionBuilder_ == null) { if (value == null) { @@ -19618,6 +25401,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .MultiAction action = 2; + */ public Builder addAction( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction value) { if (actionBuilder_ == null) { @@ -19632,6 +25418,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .MultiAction action = 2; + */ public Builder addAction( org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.Builder builderForValue) { if (actionBuilder_ == null) { @@ -19643,6 +25432,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .MultiAction action = 2; + */ public Builder addAction( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.Builder builderForValue) { if (actionBuilder_ == null) { @@ -19654,6 +25446,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .MultiAction action = 2; + */ public Builder addAllAction( java.lang.Iterable values) { if (actionBuilder_ == null) { @@ -19665,6 +25460,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .MultiAction action = 2; + */ public Builder clearAction() { if (actionBuilder_ == null) { action_ = java.util.Collections.emptyList(); @@ -19675,6 +25473,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .MultiAction action = 2; + */ public Builder removeAction(int index) { if (actionBuilder_ == null) { ensureActionIsMutable(); @@ -19685,10 +25486,16 @@ public final class ClientProtos { } return this; } + /** + * repeated .MultiAction action = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.Builder getActionBuilder( int index) { return getActionFieldBuilder().getBuilder(index); } + /** + * repeated .MultiAction action = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiActionOrBuilder getActionOrBuilder( int index) { if (actionBuilder_ == null) { @@ -19696,6 +25503,9 @@ public final class ClientProtos { return actionBuilder_.getMessageOrBuilder(index); } } + /** + * repeated .MultiAction action = 2; + */ public java.util.List getActionOrBuilderList() { if (actionBuilder_ != null) { @@ -19704,15 +25514,24 @@ public final class ClientProtos { return java.util.Collections.unmodifiableList(action_); } } + /** + * repeated .MultiAction action = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.Builder addActionBuilder() { return getActionFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.getDefaultInstance()); } + /** + * repeated .MultiAction action = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.Builder addActionBuilder( int index) { return getActionFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.getDefaultInstance()); } + /** + * repeated .MultiAction action = 2; + */ public 
java.util.List getActionBuilderList() { return getActionFieldBuilder().getBuilderList(); @@ -19731,101 +25550,216 @@ public final class ClientProtos { } return actionBuilder_; } - + // optional bool atomic = 3; private boolean atomic_ ; + /** + * optional bool atomic = 3; + */ public boolean hasAtomic() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional bool atomic = 3; + */ public boolean getAtomic() { return atomic_; } + /** + * optional bool atomic = 3; + */ public Builder setAtomic(boolean value) { bitField0_ |= 0x00000004; atomic_ = value; onChanged(); return this; } + /** + * optional bool atomic = 3; + */ public Builder clearAtomic() { bitField0_ = (bitField0_ & ~0x00000004); atomic_ = false; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:MultiRequest) } - + static { defaultInstance = new MultiRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:MultiRequest) } - + public interface MultiResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // repeated .ActionResult result = 1; + /** + * repeated .ActionResult result = 1; + */ java.util.List getResultList(); + /** + * repeated .ActionResult result = 1; + */ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult getResult(int index); + /** + * repeated .ActionResult result = 1; + */ int getResultCount(); + /** + * repeated .ActionResult result = 1; + */ java.util.List getResultOrBuilderList(); + /** + * repeated .ActionResult result = 1; + */ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResultOrBuilder getResultOrBuilder( int index); } + /** + * Protobuf type {@code MultiResponse} + */ public static final class MultiResponse extends com.google.protobuf.GeneratedMessage implements MultiResponseOrBuilder { // Use MultiResponse.newBuilder() to construct. 
- private MultiResponse(Builder builder) { + private MultiResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private MultiResponse(boolean noInit) {} - + private MultiResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final MultiResponse defaultInstance; public static MultiResponse getDefaultInstance() { return defaultInstance; } - + public MultiResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private MultiResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + result_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + result_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.PARSER, extensionRegistry)); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + result_ = java.util.Collections.unmodifiableList(result_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public MultiResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new MultiResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + // repeated .ActionResult result = 1; public static final int RESULT_FIELD_NUMBER = 1; private java.util.List result_; + /** + * repeated .ActionResult result = 1; + */ public java.util.List 
getResultList() { return result_; } + /** + * repeated .ActionResult result = 1; + */ public java.util.List getResultOrBuilderList() { return result_; } + /** + * repeated .ActionResult result = 1; + */ public int getResultCount() { return result_.size(); } + /** + * repeated .ActionResult result = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult getResult(int index) { return result_.get(index); } + /** + * repeated .ActionResult result = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResultOrBuilder getResultOrBuilder( int index) { return result_.get(index); } - + private void initFields() { result_ = java.util.Collections.emptyList(); } @@ -19833,7 +25767,7 @@ public final class ClientProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + for (int i = 0; i < getResultCount(); i++) { if (!getResult(i).isInitialized()) { memoizedIsInitialized = 0; @@ -19843,7 +25777,7 @@ public final class ClientProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -19852,12 +25786,12 @@ public final class ClientProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; for (int i = 0; i < result_.size(); i++) { size += com.google.protobuf.CodedOutputStream @@ -19867,14 +25801,14 @@ public final class ClientProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -19884,7 +25818,7 @@ public final class ClientProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse) obj; - + boolean result = true; result = result && getResultList() .equals(other.getResultList()); @@ -19892,9 +25826,13 @@ public final class ClientProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (getResultCount() > 0) { @@ -19902,89 +25840,79 @@ public final class ClientProtos { hash = (53 * hash) + getResultList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } 
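Each of the parseFrom overloads above now funnels through the shared static PARSER instead of allocating a throwaway Builder per call. For orientation, a minimal caller-side sketch of the new entry point, assuming a hypothetical serialized byte[] payload (the wrapper class and names below are illustrative, not part of this patch):

```java
import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse;

public final class MultiResponseParseSketch {
  static MultiResponse decode(byte[] payload) throws InvalidProtocolBufferException {
    // protobuf 2.4 path (what the removed lines did internally):
    //   newBuilder().mergeFrom(payload).buildParsed()
    // protobuf 2.5 path: one shared, stateless Parser, no intermediate Builder.
    return MultiResponse.PARSER.parseFrom(payload);
  }
}
```

The observable contract of the static parseFrom/parseDelimitedFrom methods is intended to stay the same (including parseDelimitedFrom returning null at end of stream); the Parser route just skips the Builder allocation and reuses the parsing wired into the new message constructor.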
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code MultiResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponseOrBuilder { @@ -19992,18 +25920,21 @@ public final class ClientProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiResponse_descriptor; } - + protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -20015,7 +25946,7 @@ public final class ClientProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (resultBuilder_ == null) { @@ -20026,20 +25957,20 @@ public final class ClientProtos { } return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse build() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse result = buildPartial(); if (!result.isInitialized()) { @@ -20047,17 +25978,7 @@ public final class ClientProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse(this); int from_bitField0_ = bitField0_; @@ -20073,7 +25994,7 @@ public final class ClientProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse)other); @@ -20082,7 +26003,7 @@ public final class ClientProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance()) return this; if (resultBuilder_ == null) { @@ -20114,7 +26035,7 @@ public final class ClientProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { for (int i = 0; i < 
getResultCount(); i++) { if (!getResult(i).isInitialized()) { @@ -20124,42 +26045,26 @@ public final class ClientProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addResult(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // repeated .ActionResult result = 1; private java.util.List result_ = java.util.Collections.emptyList(); @@ -20169,10 +26074,13 @@ public final class ClientProtos { bitField0_ |= 0x00000001; } } - + private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResultOrBuilder> resultBuilder_; - + + /** + * repeated .ActionResult result = 1; + */ public java.util.List getResultList() { if (resultBuilder_ == null) { return java.util.Collections.unmodifiableList(result_); @@ -20180,6 +26088,9 @@ public final class ClientProtos { return resultBuilder_.getMessageList(); } } + /** + * repeated .ActionResult result = 1; + */ public int getResultCount() { if (resultBuilder_ == null) { return result_.size(); @@ -20187,6 +26098,9 @@ public final class ClientProtos { return resultBuilder_.getCount(); } } + /** + * repeated .ActionResult result = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult getResult(int index) { if (resultBuilder_ == null) { return result_.get(index); @@ -20194,6 +26108,9 @@ public final class ClientProtos { return resultBuilder_.getMessage(index); } } + /** + * repeated .ActionResult result = 1; + */ public Builder setResult( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult value) { if (resultBuilder_ == null) { @@ -20208,6 +26125,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .ActionResult result = 1; + */ public Builder setResult( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder builderForValue) { if (resultBuilder_ == null) { @@ -20219,6 +26139,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .ActionResult result = 1; + */ public Builder 
addResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult value) { if (resultBuilder_ == null) { if (value == null) { @@ -20232,6 +26155,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .ActionResult result = 1; + */ public Builder addResult( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult value) { if (resultBuilder_ == null) { @@ -20246,6 +26172,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .ActionResult result = 1; + */ public Builder addResult( org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder builderForValue) { if (resultBuilder_ == null) { @@ -20257,6 +26186,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .ActionResult result = 1; + */ public Builder addResult( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder builderForValue) { if (resultBuilder_ == null) { @@ -20268,6 +26200,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .ActionResult result = 1; + */ public Builder addAllResult( java.lang.Iterable values) { if (resultBuilder_ == null) { @@ -20279,6 +26214,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .ActionResult result = 1; + */ public Builder clearResult() { if (resultBuilder_ == null) { result_ = java.util.Collections.emptyList(); @@ -20289,6 +26227,9 @@ public final class ClientProtos { } return this; } + /** + * repeated .ActionResult result = 1; + */ public Builder removeResult(int index) { if (resultBuilder_ == null) { ensureResultIsMutable(); @@ -20299,10 +26240,16 @@ public final class ClientProtos { } return this; } + /** + * repeated .ActionResult result = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder getResultBuilder( int index) { return getResultFieldBuilder().getBuilder(index); } + /** + * repeated .ActionResult result = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResultOrBuilder getResultOrBuilder( int index) { if (resultBuilder_ == null) { @@ -20310,6 +26257,9 @@ public final class ClientProtos { return resultBuilder_.getMessageOrBuilder(index); } } + /** + * repeated .ActionResult result = 1; + */ public java.util.List getResultOrBuilderList() { if (resultBuilder_ != null) { @@ -20318,15 +26268,24 @@ public final class ClientProtos { return java.util.Collections.unmodifiableList(result_); } } + /** + * repeated .ActionResult result = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder addResultBuilder() { return getResultFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.getDefaultInstance()); } + /** + * repeated .ActionResult result = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder addResultBuilder( int index) { return getResultFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.getDefaultInstance()); } + /** + * repeated .ActionResult result = 1; + */ public java.util.List getResultBuilderList() { return getResultFieldBuilder().getBuilderList(); @@ -20345,60 +26304,84 @@ public final class ClientProtos { } return resultBuilder_; } - + // @@protoc_insertion_point(builder_scope:MultiResponse) } - + static { defaultInstance = new MultiResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:MultiResponse) } - + + /** + * 
Protobuf service {@code ClientService} + */ public static abstract class ClientService implements com.google.protobuf.Service { protected ClientService() {} - + public interface Interface { + /** + * rpc get(.GetRequest) returns (.GetResponse); + */ public abstract void get( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc multiGet(.MultiGetRequest) returns (.MultiGetResponse); + */ public abstract void multiGet( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc mutate(.MutateRequest) returns (.MutateResponse); + */ public abstract void mutate( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc scan(.ScanRequest) returns (.ScanResponse); + */ public abstract void scan( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc bulkLoadHFile(.BulkLoadHFileRequest) returns (.BulkLoadHFileResponse); + */ public abstract void bulkLoadHFile( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc execService(.CoprocessorServiceRequest) returns (.CoprocessorServiceResponse); + */ public abstract void execService( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc multi(.MultiRequest) returns (.MultiResponse); + */ public abstract void multi( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request, com.google.protobuf.RpcCallback done); - + } - + public static com.google.protobuf.Service newReflectiveService( final Interface impl) { return new ClientService() { @@ -20409,7 +26392,7 @@ public final class ClientProtos { com.google.protobuf.RpcCallback done) { impl.get(controller, request, done); } - + @java.lang.Override public void multiGet( com.google.protobuf.RpcController controller, @@ -20417,7 +26400,7 @@ public final class ClientProtos { com.google.protobuf.RpcCallback done) { impl.multiGet(controller, request, done); } - + @java.lang.Override public void mutate( com.google.protobuf.RpcController controller, @@ -20425,7 +26408,7 @@ public final class ClientProtos { com.google.protobuf.RpcCallback done) { impl.mutate(controller, request, done); } - + @java.lang.Override public void scan( com.google.protobuf.RpcController controller, @@ -20433,7 +26416,7 @@ public final class ClientProtos { com.google.protobuf.RpcCallback done) { impl.scan(controller, request, done); } - + @java.lang.Override public void bulkLoadHFile( com.google.protobuf.RpcController controller, @@ -20441,7 +26424,7 @@ public final class ClientProtos { com.google.protobuf.RpcCallback done) { impl.bulkLoadHFile(controller, request, done); } - + @java.lang.Override public void execService( com.google.protobuf.RpcController controller, @@ -20449,7 +26432,7 @@ public final class ClientProtos { com.google.protobuf.RpcCallback done) { impl.execService(controller, request, done); } - + 
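The Interface methods above (and newReflectiveService, which adapts an Interface implementation into a com.google.protobuf.Service) gain per-rpc javadoc but keep their shapes. A sketch of how a handler plugs in, assuming only the generated types are real (the handler class itself is hypothetical):

```java
import com.google.protobuf.RpcCallback;
import com.google.protobuf.RpcController;
import com.google.protobuf.Service;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;

// Hypothetical handler; a concrete subclass must implement all seven rpcs.
public abstract class ClientHandlerSketch
    implements ClientProtos.ClientService.Interface {
  @Override
  public void get(RpcController controller, ClientProtos.GetRequest request,
      RpcCallback<ClientProtos.GetResponse> done) {
    // A real handler would consult the region; here we answer with defaults.
    done.run(ClientProtos.GetResponse.getDefaultInstance());
  }

  // newReflectiveService wraps the plain Interface in a protobuf Service
  // whose callMethod dispatches on the MethodDescriptor, as generated above.
  static Service asService(ClientProtos.ClientService.Interface impl) {
    return ClientProtos.ClientService.newReflectiveService(impl);
  }
}
```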
@java.lang.Override public void multi( com.google.protobuf.RpcController controller, @@ -20457,10 +26440,10 @@ public final class ClientProtos { com.google.protobuf.RpcCallback done) { impl.multi(controller, request, done); } - + }; } - + public static com.google.protobuf.BlockingService newReflectiveBlockingService(final BlockingInterface impl) { return new com.google.protobuf.BlockingService() { @@ -20468,7 +26451,7 @@ public final class ClientProtos { getDescriptorForType() { return getDescriptor(); } - + public final com.google.protobuf.Message callBlockingMethod( com.google.protobuf.Descriptors.MethodDescriptor method, com.google.protobuf.RpcController controller, @@ -20498,7 +26481,7 @@ public final class ClientProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getRequestPrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -20526,7 +26509,7 @@ public final class ClientProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getResponsePrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -20554,45 +26537,66 @@ public final class ClientProtos { throw new java.lang.AssertionError("Can't get here."); } } - + }; } - + + /** + * rpc get(.GetRequest) returns (.GetResponse); + */ public abstract void get( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc multiGet(.MultiGetRequest) returns (.MultiGetResponse); + */ public abstract void multiGet( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc mutate(.MutateRequest) returns (.MutateResponse); + */ public abstract void mutate( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc scan(.ScanRequest) returns (.ScanResponse); + */ public abstract void scan( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc bulkLoadHFile(.BulkLoadHFileRequest) returns (.BulkLoadHFileResponse); + */ public abstract void bulkLoadHFile( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc execService(.CoprocessorServiceRequest) returns (.CoprocessorServiceResponse); + */ public abstract void execService( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc multi(.MultiRequest) returns (.MultiResponse); + */ public abstract void multi( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request, com.google.protobuf.RpcCallback done); - + public static final com.google.protobuf.Descriptors.ServiceDescriptor getDescriptor() { @@ -20602,7 +26606,7 @@ public final class ClientProtos { getDescriptorForType() { return getDescriptor(); } - + public final void callMethod( com.google.protobuf.Descriptors.MethodDescriptor method, com.google.protobuf.RpcController controller, @@ 
-20654,7 +26658,7 @@ public final class ClientProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getRequestPrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -20682,7 +26686,7 @@ public final class ClientProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getResponsePrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -20710,23 +26714,23 @@ public final class ClientProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public static Stub newStub( com.google.protobuf.RpcChannel channel) { return new Stub(channel); } - + public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ClientService implements Interface { private Stub(com.google.protobuf.RpcChannel channel) { this.channel = channel; } - + private final com.google.protobuf.RpcChannel channel; - + public com.google.protobuf.RpcChannel getChannel() { return channel; } - + public void get( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request, @@ -20741,7 +26745,7 @@ public final class ClientProtos { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance())); } - + public void multiGet( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetRequest request, @@ -20756,7 +26760,7 @@ public final class ClientProtos { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetResponse.getDefaultInstance())); } - + public void mutate( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request, @@ -20771,7 +26775,7 @@ public final class ClientProtos { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance())); } - + public void scan( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request, @@ -20786,7 +26790,7 @@ public final class ClientProtos { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance())); } - + public void bulkLoadHFile( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request, @@ -20801,7 +26805,7 @@ public final class ClientProtos { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance())); } - + public void execService( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request, @@ -20816,7 +26820,7 @@ public final class ClientProtos { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance())); } - + public void multi( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request, @@ 
-20832,56 +26836,56 @@ public final class ClientProtos { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance())); } } - + public static BlockingInterface newBlockingStub( com.google.protobuf.BlockingRpcChannel channel) { return new BlockingStub(channel); } - + public interface BlockingInterface { public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse get( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetResponse multiGet( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse mutate( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse scan( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse bulkLoadHFile( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse execService( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse multi( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request) throws com.google.protobuf.ServiceException; } - + private static final class BlockingStub implements BlockingInterface { private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { this.channel = channel; } - + private final com.google.protobuf.BlockingRpcChannel channel; - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse get( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request) @@ -20892,8 +26896,8 @@ public final class ClientProtos { request, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetResponse multiGet( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetRequest request) @@ -20904,8 +26908,8 @@ public final class ClientProtos { request, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse mutate( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request) @@ -20916,8 +26920,8 @@ public final class ClientProtos { request, 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse scan( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request) @@ -20928,8 +26932,8 @@ public final class ClientProtos { request, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse bulkLoadHFile( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request) @@ -20940,8 +26944,8 @@ public final class ClientProtos { request, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse execService( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request) @@ -20952,8 +26956,8 @@ public final class ClientProtos { request, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse multi( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request) @@ -20964,10 +26968,12 @@ public final class ClientProtos { request, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance()); } - + } + + // @@protoc_insertion_point(class_scope:ClientService) } - + private static com.google.protobuf.Descriptors.Descriptor internal_static_Column_descriptor; private static @@ -21098,7 +27104,7 @@ public final class ClientProtos { private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_MultiResponse_fieldAccessorTable; - + public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; @@ -21206,209 +27212,157 @@ public final class ClientProtos { internal_static_Column_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_Column_descriptor, - new java.lang.String[] { "Family", "Qualifier", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder.class); + new java.lang.String[] { "Family", "Qualifier", }); internal_static_Get_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_Get_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_Get_descriptor, - new java.lang.String[] { "Row", "Column", "Attribute", "Filter", "TimeRange", "MaxVersions", "CacheBlocks", "StoreLimit", "StoreOffset", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder.class); + new java.lang.String[] { "Row", "Column", "Attribute", "Filter", "TimeRange", "MaxVersions", "CacheBlocks", "StoreLimit", "StoreOffset", }); internal_static_Result_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_Result_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_Result_descriptor, - new java.lang.String[] { "Cell", "AssociatedCellCount", }, - 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder.class); + new java.lang.String[] { "Cell", "AssociatedCellCount", }); internal_static_GetRequest_descriptor = getDescriptor().getMessageTypes().get(3); internal_static_GetRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_GetRequest_descriptor, - new java.lang.String[] { "Region", "Get", "ClosestRowBefore", "ExistenceOnly", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.Builder.class); + new java.lang.String[] { "Region", "Get", "ClosestRowBefore", "ExistenceOnly", }); internal_static_MultiGetRequest_descriptor = getDescriptor().getMessageTypes().get(4); internal_static_MultiGetRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_MultiGetRequest_descriptor, - new java.lang.String[] { "Region", "Get", "ClosestRowBefore", "ExistenceOnly", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetRequest.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetRequest.Builder.class); + new java.lang.String[] { "Region", "Get", "ClosestRowBefore", "ExistenceOnly", }); internal_static_GetResponse_descriptor = getDescriptor().getMessageTypes().get(5); internal_static_GetResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_GetResponse_descriptor, - new java.lang.String[] { "Result", "Exists", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.Builder.class); + new java.lang.String[] { "Result", "Exists", }); internal_static_MultiGetResponse_descriptor = getDescriptor().getMessageTypes().get(6); internal_static_MultiGetResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_MultiGetResponse_descriptor, - new java.lang.String[] { "Result", "Exists", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetResponse.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetResponse.Builder.class); + new java.lang.String[] { "Result", "Exists", }); internal_static_Condition_descriptor = getDescriptor().getMessageTypes().get(7); internal_static_Condition_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_Condition_descriptor, - new java.lang.String[] { "Row", "Family", "Qualifier", "CompareType", "Comparator", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder.class); + new java.lang.String[] { "Row", "Family", "Qualifier", "CompareType", "Comparator", }); internal_static_MutationProto_descriptor = getDescriptor().getMessageTypes().get(8); internal_static_MutationProto_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_MutationProto_descriptor, - new java.lang.String[] { "Row", "MutateType", "ColumnValue", "Timestamp", "Attribute", "WriteToWAL", "TimeRange", "AssociatedCellCount", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder.class); + new java.lang.String[] { "Row", "MutateType", "ColumnValue", 
"Timestamp", "Attribute", "WriteToWAL", "TimeRange", "AssociatedCellCount", }); internal_static_MutationProto_ColumnValue_descriptor = internal_static_MutationProto_descriptor.getNestedTypes().get(0); internal_static_MutationProto_ColumnValue_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_MutationProto_ColumnValue_descriptor, - new java.lang.String[] { "Family", "QualifierValue", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder.class); + new java.lang.String[] { "Family", "QualifierValue", }); internal_static_MutationProto_ColumnValue_QualifierValue_descriptor = internal_static_MutationProto_ColumnValue_descriptor.getNestedTypes().get(0); internal_static_MutationProto_ColumnValue_QualifierValue_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_MutationProto_ColumnValue_QualifierValue_descriptor, - new java.lang.String[] { "Qualifier", "Value", "Timestamp", "DeleteType", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder.class); + new java.lang.String[] { "Qualifier", "Value", "Timestamp", "DeleteType", }); internal_static_MutateRequest_descriptor = getDescriptor().getMessageTypes().get(9); internal_static_MutateRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_MutateRequest_descriptor, - new java.lang.String[] { "Region", "Mutation", "Condition", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.Builder.class); + new java.lang.String[] { "Region", "Mutation", "Condition", }); internal_static_MutateResponse_descriptor = getDescriptor().getMessageTypes().get(10); internal_static_MutateResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_MutateResponse_descriptor, - new java.lang.String[] { "Result", "Processed", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.Builder.class); + new java.lang.String[] { "Result", "Processed", }); internal_static_Scan_descriptor = getDescriptor().getMessageTypes().get(11); internal_static_Scan_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_Scan_descriptor, - new java.lang.String[] { "Column", "Attribute", "StartRow", "StopRow", "Filter", "TimeRange", "MaxVersions", "CacheBlocks", "BatchSize", "MaxResultSize", "StoreLimit", "StoreOffset", "LoadColumnFamiliesOnDemand", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder.class); + new java.lang.String[] { "Column", "Attribute", "StartRow", "StopRow", "Filter", "TimeRange", "MaxVersions", "CacheBlocks", "BatchSize", "MaxResultSize", "StoreLimit", "StoreOffset", "LoadColumnFamiliesOnDemand", }); internal_static_ScanRequest_descriptor = getDescriptor().getMessageTypes().get(12); internal_static_ScanRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ScanRequest_descriptor, - new java.lang.String[] { "Region", 
"Scan", "ScannerId", "NumberOfRows", "CloseScanner", "NextCallSeq", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.Builder.class); + new java.lang.String[] { "Region", "Scan", "ScannerId", "NumberOfRows", "CloseScanner", "NextCallSeq", }); internal_static_ScanResponse_descriptor = getDescriptor().getMessageTypes().get(13); internal_static_ScanResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ScanResponse_descriptor, - new java.lang.String[] { "Result", "ScannerId", "MoreResults", "Ttl", "ResultSizeBytes", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.Builder.class); + new java.lang.String[] { "Result", "ScannerId", "MoreResults", "Ttl", "ResultSizeBytes", }); internal_static_BulkLoadHFileRequest_descriptor = getDescriptor().getMessageTypes().get(14); internal_static_BulkLoadHFileRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_BulkLoadHFileRequest_descriptor, - new java.lang.String[] { "Region", "FamilyPath", "AssignSeqNum", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.Builder.class); + new java.lang.String[] { "Region", "FamilyPath", "AssignSeqNum", }); internal_static_BulkLoadHFileRequest_FamilyPath_descriptor = internal_static_BulkLoadHFileRequest_descriptor.getNestedTypes().get(0); internal_static_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_BulkLoadHFileRequest_FamilyPath_descriptor, - new java.lang.String[] { "Family", "Path", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder.class); + new java.lang.String[] { "Family", "Path", }); internal_static_BulkLoadHFileResponse_descriptor = getDescriptor().getMessageTypes().get(15); internal_static_BulkLoadHFileResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_BulkLoadHFileResponse_descriptor, - new java.lang.String[] { "Loaded", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.Builder.class); + new java.lang.String[] { "Loaded", }); internal_static_CoprocessorServiceCall_descriptor = getDescriptor().getMessageTypes().get(16); internal_static_CoprocessorServiceCall_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_CoprocessorServiceCall_descriptor, - new java.lang.String[] { "Row", "ServiceName", "MethodName", "Request", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder.class); + new java.lang.String[] { "Row", "ServiceName", "MethodName", "Request", }); internal_static_CoprocessorServiceRequest_descriptor = getDescriptor().getMessageTypes().get(17); internal_static_CoprocessorServiceRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_CoprocessorServiceRequest_descriptor, - 
new java.lang.String[] { "Region", "Call", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.Builder.class); + new java.lang.String[] { "Region", "Call", }); internal_static_CoprocessorServiceResponse_descriptor = getDescriptor().getMessageTypes().get(18); internal_static_CoprocessorServiceResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_CoprocessorServiceResponse_descriptor, - new java.lang.String[] { "Region", "Value", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.Builder.class); + new java.lang.String[] { "Region", "Value", }); internal_static_MultiAction_descriptor = getDescriptor().getMessageTypes().get(19); internal_static_MultiAction_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_MultiAction_descriptor, - new java.lang.String[] { "Mutation", "Get", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.Builder.class); + new java.lang.String[] { "Mutation", "Get", }); internal_static_ActionResult_descriptor = getDescriptor().getMessageTypes().get(20); internal_static_ActionResult_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ActionResult_descriptor, - new java.lang.String[] { "Value", "Exception", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder.class); + new java.lang.String[] { "Value", "Exception", }); internal_static_MultiRequest_descriptor = getDescriptor().getMessageTypes().get(21); internal_static_MultiRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_MultiRequest_descriptor, - new java.lang.String[] { "Region", "Action", "Atomic", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.Builder.class); + new java.lang.String[] { "Region", "Action", "Atomic", }); internal_static_MultiResponse_descriptor = getDescriptor().getMessageTypes().get(22); internal_static_MultiResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_MultiResponse_descriptor, - new java.lang.String[] { "Result", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.Builder.class); + new java.lang.String[] { "Result", }); return null; } }; @@ -21419,6 +27373,6 @@ public final class ClientProtos { org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.getDescriptor(), }, assigner); } - + // @@protoc_insertion_point(outer_class_scope) } diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClusterIdProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClusterIdProtos.java index aac3b80..1ba464c 100644 --- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClusterIdProtos.java +++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClusterIdProtos.java @@ -10,72 +10,191 @@ public 
final class ClusterIdProtos { } public interface ClusterIdOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required string clusterId = 1; + /** + * required string clusterId = 1; + * + *
+     * This is the cluster id, a uuid as a String
+     * 
+ */ boolean hasClusterId(); - String getClusterId(); + /** + * required string clusterId = 1; + * + *
+     * This is the cluster id, a uuid as a String
+     * 
+ */ + java.lang.String getClusterId(); + /** + * required string clusterId = 1; + * + *
+     * This is the cluster id, a uuid as a String
+     * 
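+     *
+     * A hedged note on the paired accessors: judging from the generated
+     * bodies in this file, getClusterId() lazily decodes UTF-8 and caches
+     * the String, while getClusterIdBytes() returns the raw ByteString.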
+ */ + com.google.protobuf.ByteString + getClusterIdBytes(); } + /** + * Protobuf type {@code ClusterId} + * + *
+   **
+   * Content of the '/hbase/hbaseid' (cluster id) znode.
+   * Also the content of the ${HBASE_ROOTDIR}/hbase.id file.
+   * 
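+   *
+   * A minimal parse sketch using the PARSER field defined in this class,
+   * assuming a serialized ClusterId in a byte[] named data (hypothetical
+   * variable); parseFrom throws InvalidProtocolBufferException on bad input:
+   *
+   *   ClusterId id = ClusterId.PARSER.parseFrom(data);
+   *   String uuid = id.getClusterId();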
+ */ public static final class ClusterId extends com.google.protobuf.GeneratedMessage implements ClusterIdOrBuilder { // Use ClusterId.newBuilder() to construct. - private ClusterId(Builder builder) { + private ClusterId(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private ClusterId(boolean noInit) {} - + private ClusterId(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final ClusterId defaultInstance; public static ClusterId getDefaultInstance() { return defaultInstance; } - + public ClusterId getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ClusterId( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + clusterId_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_ClusterId_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_ClusterId_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_ClusterId_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.class, org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public ClusterId parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ClusterId(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required string clusterId = 1; public static final int CLUSTERID_FIELD_NUMBER = 1; private java.lang.Object clusterId_; + /** + * required string clusterId = 1; + * + *
+     * This is the cluster id, a uuid as a String
+     * 
+ */ public boolean hasClusterId() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getClusterId() { + /** + * required string clusterId = 1; + * + *
+     * This is the cluster id, a uuid as a String
+     * 
+ */ + public java.lang.String getClusterId() { java.lang.Object ref = clusterId_; - if (ref instanceof String) { - return (String) ref; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { clusterId_ = s; } return s; } } - private com.google.protobuf.ByteString getClusterIdBytes() { + /** + * required string clusterId = 1; + * + *
+     * This is the cluster id, a uuid as a String
+     * 
+ */ + public com.google.protobuf.ByteString + getClusterIdBytes() { java.lang.Object ref = clusterId_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); clusterId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + private void initFields() { clusterId_ = ""; } @@ -83,7 +202,7 @@ public final class ClusterIdProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasClusterId()) { memoizedIsInitialized = 0; return false; @@ -91,7 +210,7 @@ public final class ClusterIdProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -100,12 +219,12 @@ public final class ClusterIdProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -115,14 +234,14 @@ public final class ClusterIdProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -132,7 +251,7 @@ public final class ClusterIdProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId other = (org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId) obj; - + boolean result = true; result = result && (hasClusterId() == other.hasClusterId()); if (hasClusterId()) { @@ -143,9 +262,13 @@ public final class ClusterIdProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasClusterId()) { @@ -153,89 +276,85 @@ public final class ClusterIdProtos { hash = (53 * hash) + getClusterId().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static 
org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code ClusterId} + * + *
+     **
+     * Content of the '/hbase/hbaseid' (cluster id) znode.
+     * Also the content of the ${HBASE_ROOTDIR}/hbase.id file.
+     * 
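+     *
+     * A minimal build sketch, assuming a uuid String named uuid
+     * (hypothetical); clusterId is a required field, so build() throws
+     * if it was never set:
+     *
+     *   ClusterId id = ClusterId.newBuilder().setClusterId(uuid).build();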
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterIdOrBuilder { @@ -243,18 +362,21 @@ public final class ClusterIdProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_ClusterId_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_ClusterId_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_ClusterId_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.class, org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -265,27 +387,27 @@ public final class ClusterIdProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); clusterId_ = ""; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_ClusterId_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId build() { org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId result = buildPartial(); if (!result.isInitialized()) { @@ -293,17 +415,7 @@ public final class ClusterIdProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId result = new org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId(this); int from_bitField0_ = bitField0_; @@ -316,7 +428,7 @@ public final class ClusterIdProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId)other); @@ -325,16 +437,18 @@ public final class ClusterIdProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId other) { if (other == 
org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.getDefaultInstance()) return this; if (other.hasClusterId()) { - setClusterId(other.getClusterId()); + bitField0_ |= 0x00000001; + clusterId_ = other.clusterId_; + onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasClusterId()) { @@ -342,57 +456,85 @@ public final class ClusterIdProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - clusterId_ = input.readBytes(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required string clusterId = 1; private java.lang.Object clusterId_ = ""; + /** + * required string clusterId = 1; + * + *
+       * This is the cluster id, a uuid as a String
+       * 
+ */ public boolean hasClusterId() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getClusterId() { + /** + * required string clusterId = 1; + * + *
+       * This is the cluster id, a uuid as a String
+       * 
+ */ + public java.lang.String getClusterId() { java.lang.Object ref = clusterId_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); clusterId_ = s; return s; } else { - return (String) ref; + return (java.lang.String) ref; + } + } + /** + * required string clusterId = 1; + * + *
+       * This is the cluster id, a uuid as a String
+       * 
+ */ + public com.google.protobuf.ByteString + getClusterIdBytes() { + java.lang.Object ref = clusterId_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + clusterId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; } } - public Builder setClusterId(String value) { + /** + * required string clusterId = 1; + * + *
+       * This is the cluster id, a uuid as a String
+       * 
+ */ + public Builder setClusterId( + java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ -401,35 +543,54 @@ public final class ClusterIdProtos { onChanged(); return this; } + /** + * required string clusterId = 1; + * + *
+       * This is the cluster id, a uuid as a String
+       * 
+ */ public Builder clearClusterId() { bitField0_ = (bitField0_ & ~0x00000001); clusterId_ = getDefaultInstance().getClusterId(); onChanged(); return this; } - void setClusterId(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; + /** + * required string clusterId = 1; + * + *
+       * This is the cluster id, a uuid as a String
+       * 
+ */ + public Builder setClusterIdBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; clusterId_ = value; onChanged(); + return this; } - + // @@protoc_insertion_point(builder_scope:ClusterId) } - + static { defaultInstance = new ClusterId(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:ClusterId) } - + private static com.google.protobuf.Descriptors.Descriptor internal_static_ClusterId_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_ClusterId_fieldAccessorTable; - + public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; @@ -452,9 +613,7 @@ public final class ClusterIdProtos { internal_static_ClusterId_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ClusterId_descriptor, - new java.lang.String[] { "ClusterId", }, - org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.class, - org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.Builder.class); + new java.lang.String[] { "ClusterId", }); return null; } }; @@ -463,6 +622,6 @@ public final class ClusterIdProtos { new com.google.protobuf.Descriptors.FileDescriptor[] { }, assigner); } - + // @@protoc_insertion_point(outer_class_scope) } diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClusterStatusProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClusterStatusProtos.java index 6dd12e3..a7f2212 100644 --- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClusterStatusProtos.java +++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClusterStatusProtos.java @@ -10,78 +10,347 @@ public final class ClusterStatusProtos { } public interface RegionStateOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .RegionInfo regionInfo = 1; + /** + * required .RegionInfo regionInfo = 1; + */ boolean hasRegionInfo(); + /** + * required .RegionInfo regionInfo = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(); + /** + * required .RegionInfo regionInfo = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder(); - + // required .RegionState.State state = 2; + /** + * required .RegionState.State state = 2; + */ boolean hasState(); + /** + * required .RegionState.State state = 2; + */ org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State getState(); - + // optional uint64 stamp = 3; + /** + * optional uint64 stamp = 3; + */ boolean hasStamp(); + /** + * optional uint64 stamp = 3; + */ long getStamp(); } + /** + * Protobuf type {@code RegionState} + */ public static final class RegionState extends com.google.protobuf.GeneratedMessage implements RegionStateOrBuilder { // Use RegionState.newBuilder() to construct. 
- private RegionState(Builder builder) { + private RegionState(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private RegionState(boolean noInit) {} - + private RegionState(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final RegionState defaultInstance; public static RegionState getDefaultInstance() { return defaultInstance; } - + public RegionState getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private RegionState( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = regionInfo_.toBuilder(); + } + regionInfo_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(regionInfo_); + regionInfo_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + case 16: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State value = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(2, rawValue); + } else { + bitField0_ |= 0x00000002; + state_ = value; + } + break; + } + case 24: { + bitField0_ |= 0x00000004; + stamp_ = input.readUInt64(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionState_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionState_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionState_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.class, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public RegionState 
parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RegionState(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + + /** + * Protobuf enum {@code RegionState.State} + */ public enum State implements com.google.protobuf.ProtocolMessageEnum { + /** + * OFFLINE = 0; + * + *
+       * region is in an offline state
+       * 
+ */ OFFLINE(0, 0), + /** + * PENDING_OPEN = 1; + * + *
+       * sent rpc to server to open but has not begun
+       * 
+ */ PENDING_OPEN(1, 1), + /** + * OPENING = 2; + * + *
+       * server has begun to open but not yet done
+       * 
+ */ OPENING(2, 2), + /** + * OPEN = 3; + * + *
+       * server opened region and updated meta
+       * 
+ */ OPEN(3, 3), + /** + * PENDING_CLOSE = 4; + * + *
+       * sent rpc to server to close but has not begun
+       * 
+ */ PENDING_CLOSE(4, 4), + /** + * CLOSING = 5; + * + *
+       * server has begun to close but not yet done
+       * 
+ */ CLOSING(5, 5), + /** + * CLOSED = 6; + * + *
+       * server closed region and updated meta
+       * 
+ */ CLOSED(6, 6), + /** + * SPLITTING = 7; + * + *
+       * server started split of a region
+       * 
+ */ SPLITTING(7, 7), + /** + * SPLIT = 8; + * + *
+       * server completed split of a region
+       * 
+ */ SPLIT(8, 8), + /** + * FAILED_OPEN = 9; + * + *
+       * failed to open, and won't retry any more
+       * 
+ */ FAILED_OPEN(9, 9), + /** + * FAILED_CLOSE = 10; + * + *
+       * failed to close, and won't retry any more
+       * 
+ */ FAILED_CLOSE(10, 10), ; - + + /** + * OFFLINE = 0; + * + *
+       * region is in an offline state
+       * 
+ */ public static final int OFFLINE_VALUE = 0; + /** + * PENDING_OPEN = 1; + * + *
+       * sent rpc to server to open but has not begun
+       * 
+ */ public static final int PENDING_OPEN_VALUE = 1; + /** + * OPENING = 2; + * + *
+       * server has begun to open but not yet done
+       * 
+ */ public static final int OPENING_VALUE = 2; + /** + * OPEN = 3; + * + *
+       * server opened region and updated meta
+       * 
+ */ public static final int OPEN_VALUE = 3; + /** + * PENDING_CLOSE = 4; + * + *
+       * sent rpc to server to close but has not begun
+       * 
+ */ public static final int PENDING_CLOSE_VALUE = 4; + /** + * CLOSING = 5; + * + *
+       * server has begun to close but not yet done
+       * 
+ */ public static final int CLOSING_VALUE = 5; + /** + * CLOSED = 6; + * + *
+       * server closed region and updated meta
+       * 
+ */ public static final int CLOSED_VALUE = 6; + /** + * SPLITTING = 7; + * + *
+       * server started split of a region
+       * 
+ */ public static final int SPLITTING_VALUE = 7; + /** + * SPLIT = 8; + * + *
+       * server completed split of a region
+       * 
+ */ public static final int SPLIT_VALUE = 8; + /** + * FAILED_OPEN = 9; + * + *
+       * failed to open, and won't retry any more
+       * 
+ */ public static final int FAILED_OPEN_VALUE = 9; + /** + * FAILED_CLOSE = 10; + * + *
+       * failed to close, and won't retry any more
+       * 
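+       *
+       * A round-trip sketch through the numeric form; the generated
+       * valueOf(int) returns null for numbers outside 0..10:
+       *
+       *   State s = State.valueOf(FAILED_CLOSE_VALUE); // FAILED_CLOSE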
+ */ public static final int FAILED_CLOSE_VALUE = 10; - - + + public final int getNumber() { return value; } - + public static State valueOf(int value) { switch (value) { case 0: return OFFLINE; @@ -98,7 +367,7 @@ public final class ClusterStatusProtos { default: return null; } } - + public static com.google.protobuf.Internal.EnumLiteMap internalGetValueMap() { return internalValueMap; @@ -110,7 +379,7 @@ public final class ClusterStatusProtos { return State.valueOf(number); } }; - + public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(index); @@ -123,11 +392,9 @@ public final class ClusterStatusProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.getDescriptor().getEnumTypes().get(0); } - - private static final State[] VALUES = { - OFFLINE, PENDING_OPEN, OPENING, OPEN, PENDING_CLOSE, CLOSING, CLOSED, SPLITTING, SPLIT, FAILED_OPEN, FAILED_CLOSE, - }; - + + private static final State[] VALUES = values(); + public static State valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { @@ -136,52 +403,73 @@ public final class ClusterStatusProtos { } return VALUES[desc.getIndex()]; } - + private final int index; private final int value; - + private State(int index, int value) { this.index = index; this.value = value; } - + // @@protoc_insertion_point(enum_scope:RegionState.State) } - + private int bitField0_; // required .RegionInfo regionInfo = 1; public static final int REGIONINFO_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo regionInfo_; + /** + * required .RegionInfo regionInfo = 1; + */ public boolean hasRegionInfo() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .RegionInfo regionInfo = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo() { return regionInfo_; } + /** + * required .RegionInfo regionInfo = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder() { return regionInfo_; } - + // required .RegionState.State state = 2; public static final int STATE_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State state_; + /** + * required .RegionState.State state = 2; + */ public boolean hasState() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required .RegionState.State state = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State getState() { return state_; } - + // optional uint64 stamp = 3; public static final int STAMP_FIELD_NUMBER = 3; private long stamp_; + /** + * optional uint64 stamp = 3; + */ public boolean hasStamp() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional uint64 stamp = 3; + */ public long getStamp() { return stamp_; } - + private void initFields() { regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); state_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State.OFFLINE; @@ -191,7 +479,7 @@ public final class ClusterStatusProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasRegionInfo()) { memoizedIsInitialized = 0; return false; @@ -207,7 +495,7 @@ public final class ClusterStatusProtos { 
memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -222,12 +510,12 @@ public final class ClusterStatusProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -245,14 +533,14 @@ public final class ClusterStatusProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -262,7 +550,7 @@ public final class ClusterStatusProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState other = (org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState) obj; - + boolean result = true; result = result && (hasRegionInfo() == other.hasRegionInfo()); if (hasRegionInfo()) { @@ -283,9 +571,13 @@ public final class ClusterStatusProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasRegionInfo()) { @@ -301,89 +593,79 @@ public final class ClusterStatusProtos { hash = (53 * hash) + hashLong(getStamp()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parseFrom( java.io.InputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code RegionState} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionStateOrBuilder { @@ -391,18 +673,21 @@ public final class ClusterStatusProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionState_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionState_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionState_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.class, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -414,7 +699,7 @@ public final class 
ClusterStatusProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (regionInfoBuilder_ == null) { @@ -429,20 +714,20 @@ public final class ClusterStatusProtos { bitField0_ = (bitField0_ & ~0x00000004); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionState_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState build() { org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState result = buildPartial(); if (!result.isInitialized()) { @@ -450,17 +735,7 @@ public final class ClusterStatusProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState result = new org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState(this); int from_bitField0_ = bitField0_; @@ -485,7 +760,7 @@ public final class ClusterStatusProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState)other); @@ -494,7 +769,7 @@ public final class ClusterStatusProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState other) { if (other == org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.getDefaultInstance()) return this; if (other.hasRegionInfo()) { @@ -509,7 +784,7 @@ public final class ClusterStatusProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasRegionInfo()) { @@ -525,68 +800,39 @@ public final class ClusterStatusProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.newBuilder(); - if (hasRegionInfo()) { - subBuilder.mergeFrom(getRegionInfo()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegionInfo(subBuilder.buildPartial()); - break; - } - case 16: { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State value = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(2, rawValue); - } else { - bitField0_ |= 0x00000002; - state_ = value; - } - break; - } - case 24: { - bitField0_ |= 0x00000004; - stamp_ = input.readUInt64(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .RegionInfo regionInfo = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionInfoBuilder_; + /** + * required .RegionInfo regionInfo = 1; + */ public boolean hasRegionInfo() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .RegionInfo regionInfo = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo() { if (regionInfoBuilder_ == null) { return regionInfo_; @@ -594,6 +840,9 @@ public final class ClusterStatusProtos { return regionInfoBuilder_.getMessage(); } } + /** + * required .RegionInfo regionInfo = 1; + */ public Builder setRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) { if (regionInfoBuilder_ == null) { if (value == null) { @@ -607,6 +856,9 @@ public final class ClusterStatusProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionInfo regionInfo = 1; + */ public Builder setRegionInfo( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) { if (regionInfoBuilder_ == null) { @@ -618,6 +870,9 @@ public final class ClusterStatusProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionInfo regionInfo = 1; + */ public Builder mergeRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) { if (regionInfoBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -634,6 +889,9 @@ public final class ClusterStatusProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionInfo regionInfo = 1; + */ public Builder clearRegionInfo() { if (regionInfoBuilder_ == null) { regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); @@ -644,11 +902,17 @@ public final class ClusterStatusProtos { bitField0_ = (bitField0_ & ~0x00000001); return 
this; } + /** + * required .RegionInfo regionInfo = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder getRegionInfoBuilder() { bitField0_ |= 0x00000001; onChanged(); return getRegionInfoFieldBuilder().getBuilder(); } + /** + * required .RegionInfo regionInfo = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder() { if (regionInfoBuilder_ != null) { return regionInfoBuilder_.getMessageOrBuilder(); @@ -656,6 +920,9 @@ public final class ClusterStatusProtos { return regionInfo_; } } + /** + * required .RegionInfo regionInfo = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> getRegionInfoFieldBuilder() { @@ -669,15 +936,24 @@ public final class ClusterStatusProtos { } return regionInfoBuilder_; } - + // required .RegionState.State state = 2; private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State state_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State.OFFLINE; + /** + * required .RegionState.State state = 2; + */ public boolean hasState() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required .RegionState.State state = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State getState() { return state_; } + /** + * required .RegionState.State state = 2; + */ public Builder setState(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State value) { if (value == null) { throw new NullPointerException(); @@ -687,113 +963,252 @@ public final class ClusterStatusProtos { onChanged(); return this; } + /** + * required .RegionState.State state = 2; + */ public Builder clearState() { bitField0_ = (bitField0_ & ~0x00000002); state_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State.OFFLINE; onChanged(); return this; } - + // optional uint64 stamp = 3; private long stamp_ ; + /** + * optional uint64 stamp = 3; + */ public boolean hasStamp() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional uint64 stamp = 3; + */ public long getStamp() { return stamp_; } + /** + * optional uint64 stamp = 3; + */ public Builder setStamp(long value) { bitField0_ |= 0x00000004; stamp_ = value; onChanged(); return this; } + /** + * optional uint64 stamp = 3; + */ public Builder clearStamp() { bitField0_ = (bitField0_ & ~0x00000004); stamp_ = 0L; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:RegionState) } - + static { defaultInstance = new RegionState(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:RegionState) } - + public interface RegionInTransitionOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .RegionSpecifier spec = 1; + /** + * required .RegionSpecifier spec = 1; + */ boolean hasSpec(); + /** + * required .RegionSpecifier spec = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getSpec(); + /** + * required .RegionSpecifier spec = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getSpecOrBuilder(); - + // required .RegionState regionState = 2; + /** + * required .RegionState regionState = 2; + */ boolean hasRegionState(); + /** + * required 
.RegionState regionState = 2; + */ org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState getRegionState(); + /** + * required .RegionState regionState = 2; + */ org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionStateOrBuilder getRegionStateOrBuilder(); } + /** + * Protobuf type {@code RegionInTransition} + */ public static final class RegionInTransition extends com.google.protobuf.GeneratedMessage implements RegionInTransitionOrBuilder { // Use RegionInTransition.newBuilder() to construct. - private RegionInTransition(Builder builder) { + private RegionInTransition(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private RegionInTransition(boolean noInit) {} - + private RegionInTransition(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final RegionInTransition defaultInstance; public static RegionInTransition getDefaultInstance() { return defaultInstance; } - + public RegionInTransition getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private RegionInTransition( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = spec_.toBuilder(); + } + spec_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(spec_); + spec_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.Builder subBuilder = null; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + subBuilder = regionState_.toBuilder(); + } + regionState_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(regionState_); + regionState_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000002; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionInTransition_descriptor; } - + protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionInTransition_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionInTransition_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.class, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public RegionInTransition parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RegionInTransition(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required .RegionSpecifier spec = 1; public static final int SPEC_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier spec_; + /** + * required .RegionSpecifier spec = 1; + */ public boolean hasSpec() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .RegionSpecifier spec = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getSpec() { return spec_; } + /** + * required .RegionSpecifier spec = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getSpecOrBuilder() { return spec_; } - + // required .RegionState regionState = 2; public static final int REGIONSTATE_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState regionState_; + /** + * required .RegionState regionState = 2; + */ public boolean hasRegionState() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required .RegionState regionState = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState getRegionState() { return regionState_; } + /** + * required .RegionState regionState = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionStateOrBuilder getRegionStateOrBuilder() { return regionState_; } - + private void initFields() { spec_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); regionState_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.getDefaultInstance(); @@ -802,7 +1217,7 @@ public final class ClusterStatusProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasSpec()) { memoizedIsInitialized = 0; return false; @@ -822,7 +1237,7 @@ public final class ClusterStatusProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -834,12 +1249,12 @@ public final class ClusterStatusProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -853,14 +1268,14 @@ public final class 
ClusterStatusProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -870,7 +1285,7 @@ public final class ClusterStatusProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition other = (org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition) obj; - + boolean result = true; result = result && (hasSpec() == other.hasSpec()); if (hasSpec()) { @@ -886,9 +1301,13 @@ public final class ClusterStatusProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasSpec()) { @@ -900,89 +1319,79 @@ public final class ClusterStatusProtos { hash = (53 * hash) + getRegionState().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return 
PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code RegionInTransition} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransitionOrBuilder { @@ -990,18 +1399,21 @@ public final class ClusterStatusProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionInTransition_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionInTransition_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionInTransition_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.class, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -1014,7 +1426,7 @@ public final class ClusterStatusProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (specBuilder_ == null) { @@ -1031,20 +1443,20 @@ public final class ClusterStatusProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return
org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionInTransition_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition build() { org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition result = buildPartial(); if (!result.isInitialized()) { @@ -1052,17 +1464,7 @@ public final class ClusterStatusProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition result = new org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition(this); int from_bitField0_ = bitField0_; @@ -1087,7 +1489,7 @@ public final class ClusterStatusProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition)other); @@ -1096,7 +1498,7 @@ public final class ClusterStatusProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition other) { if (other == org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.getDefaultInstance()) return this; if (other.hasSpec()) { @@ -1108,7 +1510,7 @@ public final class ClusterStatusProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasSpec()) { @@ -1128,61 +1530,39 @@ public final class ClusterStatusProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasSpec()) { - subBuilder.mergeFrom(getSpec()); - } - input.readMessage(subBuilder, extensionRegistry); - setSpec(subBuilder.buildPartial()); - break; - } - case 18: { - 
org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.newBuilder(); - if (hasRegionState()) { - subBuilder.mergeFrom(getRegionState()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegionState(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .RegionSpecifier spec = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier spec_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> specBuilder_; + /** + * required .RegionSpecifier spec = 1; + */ public boolean hasSpec() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .RegionSpecifier spec = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getSpec() { if (specBuilder_ == null) { return spec_; @@ -1190,6 +1570,9 @@ public final class ClusterStatusProtos { return specBuilder_.getMessage(); } } + /** + * required .RegionSpecifier spec = 1; + */ public Builder setSpec(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (specBuilder_ == null) { if (value == null) { @@ -1203,6 +1586,9 @@ public final class ClusterStatusProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier spec = 1; + */ public Builder setSpec( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { if (specBuilder_ == null) { @@ -1214,6 +1600,9 @@ public final class ClusterStatusProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier spec = 1; + */ public Builder mergeSpec(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (specBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -1230,6 +1619,9 @@ public final class ClusterStatusProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier spec = 1; + */ public Builder clearSpec() { if (specBuilder_ == null) { spec_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); @@ -1240,11 +1632,17 @@ public final class ClusterStatusProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * required .RegionSpecifier spec = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getSpecBuilder() { bitField0_ |= 0x00000001; onChanged(); return getSpecFieldBuilder().getBuilder(); } + /** + * required .RegionSpecifier spec = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getSpecOrBuilder() { if (specBuilder_ != null) { return 
specBuilder_.getMessageOrBuilder(); @@ -1252,6 +1650,9 @@ public final class ClusterStatusProtos { return spec_; } } + /** + * required .RegionSpecifier spec = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getSpecFieldBuilder() { @@ -1265,14 +1666,20 @@ public final class ClusterStatusProtos { } return specBuilder_; } - + // required .RegionState regionState = 2; private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState regionState_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionStateOrBuilder> regionStateBuilder_; + /** + * required .RegionState regionState = 2; + */ public boolean hasRegionState() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required .RegionState regionState = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState getRegionState() { if (regionStateBuilder_ == null) { return regionState_; @@ -1280,6 +1687,9 @@ public final class ClusterStatusProtos { return regionStateBuilder_.getMessage(); } } + /** + * required .RegionState regionState = 2; + */ public Builder setRegionState(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState value) { if (regionStateBuilder_ == null) { if (value == null) { @@ -1293,6 +1703,9 @@ public final class ClusterStatusProtos { bitField0_ |= 0x00000002; return this; } + /** + * required .RegionState regionState = 2; + */ public Builder setRegionState( org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.Builder builderForValue) { if (regionStateBuilder_ == null) { @@ -1304,6 +1717,9 @@ public final class ClusterStatusProtos { bitField0_ |= 0x00000002; return this; } + /** + * required .RegionState regionState = 2; + */ public Builder mergeRegionState(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState value) { if (regionStateBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && @@ -1320,6 +1736,9 @@ public final class ClusterStatusProtos { bitField0_ |= 0x00000002; return this; } + /** + * required .RegionState regionState = 2; + */ public Builder clearRegionState() { if (regionStateBuilder_ == null) { regionState_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.getDefaultInstance(); @@ -1330,11 +1749,17 @@ public final class ClusterStatusProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } + /** + * required .RegionState regionState = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.Builder getRegionStateBuilder() { bitField0_ |= 0x00000002; onChanged(); return getRegionStateFieldBuilder().getBuilder(); } + /** + * required .RegionState regionState = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionStateOrBuilder getRegionStateOrBuilder() { if (regionStateBuilder_ != null) { return regionStateBuilder_.getMessageOrBuilder(); @@ -1342,6 +1767,9 @@ public final class ClusterStatusProtos { return regionState_; } } 
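The RegionInTransition hunks above are the heart of this regeneration, which moves the generated code from the protobuf 2.4.x surface to the 2.5.0 one (the version is inferred from the new patterns): a per-message PARSER singleton now backs every static parseFrom/parseDelimitedFrom overload, the private buildParsed() helper is deleted, hashCode() is memoized, and the field accessor table is initialized lazily via ensureFieldAccessorsInitialized. A minimal sketch of what the new surface looks like to calling code; the byte[] argument is a hypothetical stand-in for bytes produced elsewhere by toByteArray():

import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition;

public class RegionInTransitionParseSketch {
  static RegionInTransition parse(byte[] data) throws InvalidProtocolBufferException {
    // 2.5.0 style: every parseFrom overload funnels through the PARSER singleton.
    return RegionInTransition.PARSER.parseFrom(data);
    // The 2.4.x generated code reached the same result through a builder
    // round-trip, newBuilder().mergeFrom(data).buildParsed(), which the
    // regenerated class no longer needs.
  }
}

The observable contract is unchanged for well-formed input, and parseDelimitedFrom keeps its null-at-end-of-stream behavior, now via PARSER.parseDelimitedFrom instead of the mergeDelimitedFrom boolean check.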
+ /** + * required .RegionState regionState = 2; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionStateOrBuilder> getRegionStateFieldBuilder() { @@ -1355,86 +1783,210 @@ public final class ClusterStatusProtos { } return regionStateBuilder_; } - + // @@protoc_insertion_point(builder_scope:RegionInTransition) } - + static { defaultInstance = new RegionInTransition(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:RegionInTransition) } - + public interface LiveServerInfoOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .ServerName server = 1; + /** + * required .ServerName server = 1; + */ boolean hasServer(); + /** + * required .ServerName server = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer(); + /** + * required .ServerName server = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder(); - + // required .ServerLoad serverLoad = 2; + /** + * required .ServerLoad serverLoad = 2; + */ boolean hasServerLoad(); + /** + * required .ServerLoad serverLoad = 2; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad getServerLoad(); + /** + * required .ServerLoad serverLoad = 2; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoadOrBuilder getServerLoadOrBuilder(); } + /** + * Protobuf type {@code LiveServerInfo} + */ public static final class LiveServerInfo extends com.google.protobuf.GeneratedMessage implements LiveServerInfoOrBuilder { // Use LiveServerInfo.newBuilder() to construct. 
- private LiveServerInfo(Builder builder) { + private LiveServerInfo(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private LiveServerInfo(boolean noInit) {} - + private LiveServerInfo(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final LiveServerInfo defaultInstance; public static LiveServerInfo getDefaultInstance() { return defaultInstance; } - + public LiveServerInfo getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private LiveServerInfo( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = server_.toBuilder(); + } + server_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(server_); + server_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.Builder subBuilder = null; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + subBuilder = serverLoad_.toBuilder(); + } + serverLoad_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(serverLoad_); + serverLoad_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000002; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_LiveServerInfo_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_LiveServerInfo_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_LiveServerInfo_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.class, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder.class); + } + + public static com.google.protobuf.Parser<LiveServerInfo> PARSER = + new
com.google.protobuf.AbstractParser<LiveServerInfo>() { + public LiveServerInfo parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new LiveServerInfo(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser<LiveServerInfo> getParserForType() { + return PARSER; } - + private int bitField0_; // required .ServerName server = 1; public static final int SERVER_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName server_; + /** + * required .ServerName server = 1; + */ public boolean hasServer() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .ServerName server = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer() { return server_; } + /** + * required .ServerName server = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() { return server_; } - + // required .ServerLoad serverLoad = 2; public static final int SERVERLOAD_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad serverLoad_; + /** + * required .ServerLoad serverLoad = 2; + */ public boolean hasServerLoad() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required .ServerLoad serverLoad = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad getServerLoad() { return serverLoad_; } + /** + * required .ServerLoad serverLoad = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoadOrBuilder getServerLoadOrBuilder() { return serverLoad_; } - + private void initFields() { server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); serverLoad_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.getDefaultInstance(); @@ -1443,7 +1995,7 @@ public final class ClusterStatusProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasServer()) { memoizedIsInitialized = 0; return false; @@ -1463,7 +2015,7 @@ public final class ClusterStatusProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -1475,12 +2027,12 @@ public final class ClusterStatusProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -1494,14 +2046,14 @@ public final class ClusterStatusProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -1511,7 +2063,7 @@ public final class ClusterStatusProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo other = (org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo) obj; - + boolean result = true; result = result && (hasServer() == other.hasServer()); if
(hasServer()) { @@ -1527,9 +2079,13 @@ public final class ClusterStatusProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasServer()) { @@ -1541,89 +2097,79 @@ public final class ClusterStatusProtos { hash = (53 * hash) + getServerLoad().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return 
PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code LiveServerInfo} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfoOrBuilder { @@ -1631,18 +2177,21 @@ public final class ClusterStatusProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_LiveServerInfo_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_LiveServerInfo_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_LiveServerInfo_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.class, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -1655,7 +2204,7 @@ public final class ClusterStatusProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (serverBuilder_ == null) { @@ -1672,20 +2221,20 @@ public final class ClusterStatusProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_LiveServerInfo_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo build() { org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo result = buildPartial(); if (!result.isInitialized()) { @@ -1693,17 +2242,7 @@ public final class ClusterStatusProtos { } return result; } - - private
org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo result = new org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo(this); int from_bitField0_ = bitField0_; @@ -1728,7 +2267,7 @@ public final class ClusterStatusProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo)other); @@ -1737,7 +2276,7 @@ public final class ClusterStatusProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo other) { if (other == org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.getDefaultInstance()) return this; if (other.hasServer()) { @@ -1749,7 +2288,7 @@ public final class ClusterStatusProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasServer()) { @@ -1769,61 +2308,39 @@ public final class ClusterStatusProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(); - if (hasServer()) { - subBuilder.mergeFrom(getServer()); - } - input.readMessage(subBuilder, extensionRegistry); - setServer(subBuilder.buildPartial()); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.newBuilder(); - if (hasServerLoad()) { - subBuilder.mergeFrom(getServerLoad()); - } - input.readMessage(subBuilder, extensionRegistry); - setServerLoad(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; 
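Both Builder.mergeFrom(CodedInputStream, ExtensionRegistryLite) rewrites above, for RegionInTransition and LiveServerInfo alike, drop the hand-rolled tag switch and delegate to PARSER.parsePartialFrom, recovering whatever was decoded before a failure through InvalidProtocolBufferException.getUnfinishedMessage() and merging it in the finally block before the exception is rethrown. A sketch of what that guarantees a caller; the lenient recovery policy here is illustrative, not something this patch itself introduces:

import java.io.IOException;
import com.google.protobuf.CodedInputStream;
import com.google.protobuf.ExtensionRegistryLite;
import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo;

public class PartialMergeSketch {
  // Attempts a merge and keeps any fields decoded before a parse failure.
  static LiveServerInfo.Builder mergeLeniently(CodedInputStream input,
      LiveServerInfo.Builder builder) throws IOException {
    try {
      builder.mergeFrom(input, ExtensionRegistryLite.getEmptyRegistry());
    } catch (InvalidProtocolBufferException e) {
      // Under the generated pattern above, fields read before the failure
      // were already merged into `builder` by the finally block, so they
      // survive the exception; swallowing it is this sketch's policy choice.
    }
    return builder;
  }
}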
- + // required .ServerName server = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverBuilder_; + /** + * required .ServerName server = 1; + */ public boolean hasServer() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .ServerName server = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer() { if (serverBuilder_ == null) { return server_; @@ -1831,6 +2348,9 @@ public final class ClusterStatusProtos { return serverBuilder_.getMessage(); } } + /** + * required .ServerName server = 1; + */ public Builder setServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { if (serverBuilder_ == null) { if (value == null) { @@ -1844,6 +2364,9 @@ public final class ClusterStatusProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .ServerName server = 1; + */ public Builder setServer( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { if (serverBuilder_ == null) { @@ -1855,6 +2378,9 @@ public final class ClusterStatusProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .ServerName server = 1; + */ public Builder mergeServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { if (serverBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -1871,6 +2397,9 @@ public final class ClusterStatusProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .ServerName server = 1; + */ public Builder clearServer() { if (serverBuilder_ == null) { server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); @@ -1881,11 +2410,17 @@ public final class ClusterStatusProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * required .ServerName server = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getServerBuilder() { bitField0_ |= 0x00000001; onChanged(); return getServerFieldBuilder().getBuilder(); } + /** + * required .ServerName server = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() { if (serverBuilder_ != null) { return serverBuilder_.getMessageOrBuilder(); @@ -1893,6 +2428,9 @@ public final class ClusterStatusProtos { return server_; } } + /** + * required .ServerName server = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getServerFieldBuilder() { @@ -1906,14 +2444,20 @@ public final class ClusterStatusProtos { } return serverBuilder_; } - + // required .ServerLoad serverLoad = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad serverLoad_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoadOrBuilder> serverLoadBuilder_; + /** + * required .ServerLoad serverLoad = 2; + */ public boolean hasServerLoad() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required .ServerLoad serverLoad = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad getServerLoad() { if (serverLoadBuilder_ == null) { return serverLoad_; @@ -1921,6 +2465,9 @@ public final class ClusterStatusProtos { return serverLoadBuilder_.getMessage(); } } + /** + * required .ServerLoad serverLoad = 2; + */ public Builder setServerLoad(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad value) { if (serverLoadBuilder_ == null) { if (value == null) { @@ -1934,6 +2481,9 @@ public final class ClusterStatusProtos { bitField0_ |= 0x00000002; return this; } + /** + * required .ServerLoad serverLoad = 2; + */ public Builder setServerLoad( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.Builder builderForValue) { if (serverLoadBuilder_ == null) { @@ -1945,6 +2495,9 @@ public final class ClusterStatusProtos { bitField0_ |= 0x00000002; return this; } + /** + * required .ServerLoad serverLoad = 2; + */ public Builder mergeServerLoad(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad value) { if (serverLoadBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && @@ -1961,6 +2514,9 @@ public final class ClusterStatusProtos { bitField0_ |= 0x00000002; return this; } + /** + * required .ServerLoad serverLoad = 2; + */ public Builder clearServerLoad() { if (serverLoadBuilder_ == null) { serverLoad_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.getDefaultInstance(); @@ -1971,11 +2527,17 @@ public final class ClusterStatusProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } + /** + * required .ServerLoad serverLoad = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.Builder getServerLoadBuilder() { bitField0_ |= 0x00000002; onChanged(); return getServerLoadFieldBuilder().getBuilder(); } + /** + * required .ServerLoad serverLoad = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoadOrBuilder getServerLoadOrBuilder() { if (serverLoadBuilder_ != null) { return serverLoadBuilder_.getMessageOrBuilder(); @@ -1983,6 +2545,9 @@ public final class ClusterStatusProtos { return serverLoad_; } } + /** + * required .ServerLoad serverLoad = 2; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoadOrBuilder> getServerLoadFieldBuilder() { @@ -1996,273 +2561,650 @@ public final class ClusterStatusProtos { } return serverLoadBuilder_; } - + // @@protoc_insertion_point(builder_scope:LiveServerInfo) } - + static { defaultInstance = new LiveServerInfo(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:LiveServerInfo) } - + public interface ClusterStatusOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // optional .HBaseVersionFileContent hbaseVersion = 1; + /** + * optional .HBaseVersionFileContent hbaseVersion = 1; + */ boolean hasHbaseVersion(); + /** + * optional .HBaseVersionFileContent hbaseVersion = 1; + */ 
org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent getHbaseVersion(); + /** + * optional .HBaseVersionFileContent hbaseVersion = 1; + */ org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContentOrBuilder getHbaseVersionOrBuilder(); - + // repeated .LiveServerInfo liveServers = 2; + /** + * repeated .LiveServerInfo liveServers = 2; + */ java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo> getLiveServersList(); + /** + * repeated .LiveServerInfo liveServers = 2; + */ org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo getLiveServers(int index); + /** + * repeated .LiveServerInfo liveServers = 2; + */ int getLiveServersCount(); + /** + * repeated .LiveServerInfo liveServers = 2; + */ java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfoOrBuilder> getLiveServersOrBuilderList(); + /** + * repeated .LiveServerInfo liveServers = 2; + */ org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfoOrBuilder getLiveServersOrBuilder( int index); - + // repeated .ServerName deadServers = 3; + /** + * repeated .ServerName deadServers = 3; + */ java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> getDeadServersList(); + /** + * repeated .ServerName deadServers = 3; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDeadServers(int index); + /** + * repeated .ServerName deadServers = 3; + */ int getDeadServersCount(); + /** + * repeated .ServerName deadServers = 3; + */ java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getDeadServersOrBuilderList(); + /** + * repeated .ServerName deadServers = 3; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDeadServersOrBuilder( int index); - + // repeated .RegionInTransition regionsInTransition = 4; + /** + * repeated .RegionInTransition regionsInTransition = 4; + */ java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition> getRegionsInTransitionList(); + /** + * repeated .RegionInTransition regionsInTransition = 4; + */ org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition getRegionsInTransition(int index); + /** + * repeated .RegionInTransition regionsInTransition = 4; + */ int getRegionsInTransitionCount(); + /** + * repeated .RegionInTransition regionsInTransition = 4; + */ java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransitionOrBuilder> getRegionsInTransitionOrBuilderList(); + /** + * repeated .RegionInTransition regionsInTransition = 4; + */ org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransitionOrBuilder getRegionsInTransitionOrBuilder( int index); - + // optional .ClusterId clusterId = 5; + /** + * optional .ClusterId clusterId = 5; + */ boolean hasClusterId(); + /** + * optional .ClusterId clusterId = 5; + */ org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId getClusterId(); + /** + * optional .ClusterId clusterId = 5; + */ org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterIdOrBuilder getClusterIdOrBuilder(); - + // repeated .Coprocessor masterCoprocessors = 6; + /** + * repeated .Coprocessor masterCoprocessors = 6; + */ java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor> getMasterCoprocessorsList(); + /** + * repeated .Coprocessor masterCoprocessors = 6; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor getMasterCoprocessors(int index); + /** + * repeated .Coprocessor masterCoprocessors = 6; + */ int getMasterCoprocessorsCount(); + /** + * repeated .Coprocessor masterCoprocessors = 6; + */ java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder> getMasterCoprocessorsOrBuilderList(); + /** + * repeated .Coprocessor masterCoprocessors = 6; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder getMasterCoprocessorsOrBuilder( int index); - + // optional .ServerName master = 7; + /** + * optional .ServerName
master = 7; + */ boolean hasMaster(); + /** + * optional .ServerName master = 7; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getMaster(); + /** + * optional .ServerName master = 7; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getMasterOrBuilder(); - + // repeated .ServerName backupMasters = 8; + /** + * repeated .ServerName backupMasters = 8; + */ java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> getBackupMastersList(); + /** + * repeated .ServerName backupMasters = 8; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getBackupMasters(int index); + /** + * repeated .ServerName backupMasters = 8; + */ int getBackupMastersCount(); + /** + * repeated .ServerName backupMasters = 8; + */ java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getBackupMastersOrBuilderList(); + /** + * repeated .ServerName backupMasters = 8; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getBackupMastersOrBuilder( int index); - + // optional bool balancerOn = 9; + /** + * optional bool balancerOn = 9; + */ boolean hasBalancerOn(); + /** + * optional bool balancerOn = 9; + */ boolean getBalancerOn(); } + /** + * Protobuf type {@code ClusterStatus} + */ public static final class ClusterStatus extends com.google.protobuf.GeneratedMessage implements ClusterStatusOrBuilder { // Use ClusterStatus.newBuilder() to construct. - private ClusterStatus(Builder builder) { + private ClusterStatus(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private ClusterStatus(boolean noInit) {} - + private ClusterStatus(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final ClusterStatus defaultInstance; public static ClusterStatus getDefaultInstance() { return defaultInstance; } - + public ClusterStatus getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ClusterStatus( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = hbaseVersion_.toBuilder(); + } + hbaseVersion_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(hbaseVersion_); + hbaseVersion_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + case 18: { + if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + liveServers_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo>(); + mutable_bitField0_ |= 0x00000002; + } + liveServers_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.PARSER, extensionRegistry)); +
break; + } + case 26: { + if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { + deadServers_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName>(); + mutable_bitField0_ |= 0x00000004; + } + deadServers_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry)); + break; + } + case 34: { + if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) { + regionsInTransition_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition>(); + mutable_bitField0_ |= 0x00000008; + } + regionsInTransition_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.PARSER, extensionRegistry)); + break; + } + case 42: { + org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.Builder subBuilder = null; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + subBuilder = clusterId_.toBuilder(); + } + clusterId_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(clusterId_); + clusterId_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000002; + break; + } + case 50: { + if (!((mutable_bitField0_ & 0x00000020) == 0x00000020)) { + masterCoprocessors_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor>(); + mutable_bitField0_ |= 0x00000020; + } + masterCoprocessors_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.PARSER, extensionRegistry)); + break; + } + case 58: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = null; + if (((bitField0_ & 0x00000004) == 0x00000004)) { + subBuilder = master_.toBuilder(); + } + master_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(master_); + master_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000004; + break; + } + case 66: { + if (!((mutable_bitField0_ & 0x00000080) == 0x00000080)) { + backupMasters_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName>(); + mutable_bitField0_ |= 0x00000080; + } + backupMasters_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry)); + break; + } + case 72: { + bitField0_ |= 0x00000008; + balancerOn_ = input.readBool(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + liveServers_ = java.util.Collections.unmodifiableList(liveServers_); + } + if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { + deadServers_ = java.util.Collections.unmodifiableList(deadServers_); + } + if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) { + regionsInTransition_ = java.util.Collections.unmodifiableList(regionsInTransition_); + } + if (((mutable_bitField0_ & 0x00000020) == 0x00000020)) { + masterCoprocessors_ = java.util.Collections.unmodifiableList(masterCoprocessors_); + } + if (((mutable_bitField0_ & 0x00000080) == 0x00000080)) { + backupMasters_ = java.util.Collections.unmodifiableList(backupMasters_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return
org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_ClusterStatus_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_ClusterStatus_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_ClusterStatus_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.class, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.Builder.class); + } + + public static com.google.protobuf.Parser<ClusterStatus> PARSER = + new com.google.protobuf.AbstractParser<ClusterStatus>() { + public ClusterStatus parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ClusterStatus(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser<ClusterStatus> getParserForType() { + return PARSER; } - + private int bitField0_; // optional .HBaseVersionFileContent hbaseVersion = 1; public static final int HBASEVERSION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent hbaseVersion_; + /** + * optional .HBaseVersionFileContent hbaseVersion = 1; + */ public boolean hasHbaseVersion() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional .HBaseVersionFileContent hbaseVersion = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent getHbaseVersion() { return hbaseVersion_; } + /** + * optional .HBaseVersionFileContent hbaseVersion = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContentOrBuilder getHbaseVersionOrBuilder() { return hbaseVersion_; } - + // repeated .LiveServerInfo liveServers = 2; public static final int LIVESERVERS_FIELD_NUMBER = 2; private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo> liveServers_; + /** + * repeated .LiveServerInfo liveServers = 2; + */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo> getLiveServersList() { return liveServers_; } + /** + * repeated .LiveServerInfo liveServers = 2; + */ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfoOrBuilder> getLiveServersOrBuilderList() { return liveServers_; } + /** + * repeated .LiveServerInfo liveServers = 2; + */ public int getLiveServersCount() { return liveServers_.size(); } + /** + * repeated .LiveServerInfo liveServers = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo getLiveServers(int index) { return liveServers_.get(index); } + /** + * repeated .LiveServerInfo liveServers = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfoOrBuilder getLiveServersOrBuilder( int index) { return liveServers_.get(index); } - + // repeated .ServerName deadServers = 3; public static final int DEADSERVERS_FIELD_NUMBER = 3; private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> deadServers_; + /** + * repeated .ServerName deadServers = 3; + */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> getDeadServersList() { return deadServers_; } + /** + * repeated .ServerName deadServers = 3; + */ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getDeadServersOrBuilderList() { return deadServers_; } + /** + * repeated .ServerName deadServers = 3; + */ public int getDeadServersCount() { return deadServers_.size(); } + /** + * repeated .ServerName deadServers = 3; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDeadServers(int index) {
return deadServers_.get(index); } + /** + * repeated .ServerName deadServers = 3; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDeadServersOrBuilder( int index) { return deadServers_.get(index); } - + // repeated .RegionInTransition regionsInTransition = 4; public static final int REGIONSINTRANSITION_FIELD_NUMBER = 4; private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition> regionsInTransition_; + /** + * repeated .RegionInTransition regionsInTransition = 4; + */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition> getRegionsInTransitionList() { return regionsInTransition_; } + /** + * repeated .RegionInTransition regionsInTransition = 4; + */ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransitionOrBuilder> getRegionsInTransitionOrBuilderList() { return regionsInTransition_; } + /** + * repeated .RegionInTransition regionsInTransition = 4; + */ public int getRegionsInTransitionCount() { return regionsInTransition_.size(); } + /** + * repeated .RegionInTransition regionsInTransition = 4; + */ public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition getRegionsInTransition(int index) { return regionsInTransition_.get(index); } + /** + * repeated .RegionInTransition regionsInTransition = 4; + */ public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransitionOrBuilder getRegionsInTransitionOrBuilder( int index) { return regionsInTransition_.get(index); } - + // optional .ClusterId clusterId = 5; public static final int CLUSTERID_FIELD_NUMBER = 5; private org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId clusterId_; + /** + * optional .ClusterId clusterId = 5; + */ public boolean hasClusterId() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional .ClusterId clusterId = 5; + */ public org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId getClusterId() { return clusterId_; } + /** + * optional .ClusterId clusterId = 5; + */ public org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterIdOrBuilder getClusterIdOrBuilder() { return clusterId_; } - + // repeated .Coprocessor masterCoprocessors = 6; public static final int MASTERCOPROCESSORS_FIELD_NUMBER = 6; private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor> masterCoprocessors_; + /** + * repeated .Coprocessor masterCoprocessors = 6; + */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor> getMasterCoprocessorsList() { return masterCoprocessors_; } + /** + * repeated .Coprocessor masterCoprocessors = 6; + */ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder> getMasterCoprocessorsOrBuilderList() { return masterCoprocessors_; } + /** + * repeated .Coprocessor masterCoprocessors = 6; + */ public int getMasterCoprocessorsCount() { return masterCoprocessors_.size(); } + /** + * repeated .Coprocessor masterCoprocessors = 6; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor getMasterCoprocessors(int index) { return masterCoprocessors_.get(index); } + /** + * repeated .Coprocessor masterCoprocessors = 6; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder getMasterCoprocessorsOrBuilder( int index) { return masterCoprocessors_.get(index); } - + // optional .ServerName master = 7; public static final int MASTER_FIELD_NUMBER = 7; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName master_; + /** + * optional .ServerName master = 7; + */ public boolean hasMaster() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional .ServerName master = 7; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getMaster() { return master_; } + /** + * optional .ServerName master
= 7; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getMasterOrBuilder() { return master_; } - + // repeated .ServerName backupMasters = 8; public static final int BACKUPMASTERS_FIELD_NUMBER = 8; private java.util.List backupMasters_; + /** + * repeated .ServerName backupMasters = 8; + */ public java.util.List getBackupMastersList() { return backupMasters_; } + /** + * repeated .ServerName backupMasters = 8; + */ public java.util.List getBackupMastersOrBuilderList() { return backupMasters_; } + /** + * repeated .ServerName backupMasters = 8; + */ public int getBackupMastersCount() { return backupMasters_.size(); } + /** + * repeated .ServerName backupMasters = 8; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getBackupMasters(int index) { return backupMasters_.get(index); } + /** + * repeated .ServerName backupMasters = 8; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getBackupMastersOrBuilder( int index) { return backupMasters_.get(index); } - + // optional bool balancerOn = 9; public static final int BALANCERON_FIELD_NUMBER = 9; private boolean balancerOn_; + /** + * optional bool balancerOn = 9; + */ public boolean hasBalancerOn() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * optional bool balancerOn = 9; + */ public boolean getBalancerOn() { return balancerOn_; } - + private void initFields() { hbaseVersion_ = org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.getDefaultInstance(); liveServers_ = java.util.Collections.emptyList(); @@ -2278,7 +3220,7 @@ public final class ClusterStatusProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (hasHbaseVersion()) { if (!getHbaseVersion().isInitialized()) { memoizedIsInitialized = 0; @@ -2330,7 +3272,7 @@ public final class ClusterStatusProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -2363,12 +3305,12 @@ public final class ClusterStatusProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -2410,14 +3352,14 @@ public final class ClusterStatusProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -2427,7 +3369,7 @@ public final class ClusterStatusProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus other = (org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus) obj; - + boolean result = true; result = result && (hasHbaseVersion() == other.hasHbaseVersion()); if (hasHbaseVersion()) { @@ -2463,9 +3405,13 @@ public final class ClusterStatusProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 
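/*
 * hashCode() is now memoized: the message is immutable once built, so the value computed
 * here is cached in memoizedHashCode. Zero doubles as the "not yet computed" sentinel,
 * so the rare message whose real hash is 0 is simply recomputed on every call; the
 * result stays correct either way.
 */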
41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasHbaseVersion()) { @@ -2505,89 +3451,79 @@ public final class ClusterStatusProtos { hash = (53 * hash) + hashBoolean(getBalancerOn()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, 
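/*
 * Every static parseFrom/parseDelimitedFrom overload now delegates to PARSER instead of
 * the removed newBuilder().mergeFrom(...).buildParsed() chain. The delimited variants
 * keep their old contract: AbstractParser returns null on a clean end-of-stream, so the
 * usual read loop still works. A minimal sketch, assuming `in` is an InputStream of
 * length-delimited ClusterStatus records:
 *
 *   ClusterStatus cs;
 *   while ((cs = ClusterStatus.parseDelimitedFrom(in)) != null) {
 *     // handle one record
 *   }
 */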
extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code ClusterStatus} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatusOrBuilder { @@ -2595,18 +3531,21 @@ public final class ClusterStatusProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_ClusterStatus_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_ClusterStatus_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_ClusterStatus_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.class, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -2625,7 +3564,7 @@ public final class ClusterStatusProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (hbaseVersionBuilder_ == null) { @@ -2680,20 +3619,20 @@ public final class ClusterStatusProtos { bitField0_ = (bitField0_ & ~0x00000100); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_ClusterStatus_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus build() { org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus result = buildPartial(); if (!result.isInitialized()) { @@ -2701,17 +3640,7 @@ public final class ClusterStatusProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - 
result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus result = new org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus(this); int from_bitField0_ = bitField0_; @@ -2793,7 +3722,7 @@ public final class ClusterStatusProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus)other); @@ -2802,7 +3731,7 @@ public final class ClusterStatusProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus other) { if (other == org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.getDefaultInstance()) return this; if (other.hasHbaseVersion()) { @@ -2950,7 +3879,7 @@ public final class ClusterStatusProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (hasHbaseVersion()) { if (!getHbaseVersion().isInitialized()) { @@ -3002,105 +3931,39 @@ public final class ClusterStatusProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.newBuilder(); - if (hasHbaseVersion()) { - subBuilder.mergeFrom(getHbaseVersion()); - } - input.readMessage(subBuilder, extensionRegistry); - setHbaseVersion(subBuilder.buildPartial()); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addLiveServers(subBuilder.buildPartial()); - break; - } - case 26: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addDeadServers(subBuilder.buildPartial()); - break; - } - case 34: { - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addRegionsInTransition(subBuilder.buildPartial()); - break; - } - case 42: { - org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.Builder subBuilder = 
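/*
 * The handwritten tag switch being removed here is replaced, just below, by a single
 * delegation to PARSER.parsePartialFrom. The replacement preserves partial-merge
 * behaviour: when parsing fails, the fields decoded so far are recovered through
 * e.getUnfinishedMessage() and merged into this Builder before the exception is
 * rethrown.
 */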
org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.newBuilder(); - if (hasClusterId()) { - subBuilder.mergeFrom(getClusterId()); - } - input.readMessage(subBuilder, extensionRegistry); - setClusterId(subBuilder.buildPartial()); - break; - } - case 50: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addMasterCoprocessors(subBuilder.buildPartial()); - break; - } - case 58: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(); - if (hasMaster()) { - subBuilder.mergeFrom(getMaster()); - } - input.readMessage(subBuilder, extensionRegistry); - setMaster(subBuilder.buildPartial()); - break; - } - case 66: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addBackupMasters(subBuilder.buildPartial()); - break; - } - case 72: { - bitField0_ |= 0x00000100; - balancerOn_ = input.readBool(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // optional .HBaseVersionFileContent hbaseVersion = 1; private org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent hbaseVersion_ = org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent, org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.Builder, org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContentOrBuilder> hbaseVersionBuilder_; + /** + * optional .HBaseVersionFileContent hbaseVersion = 1; + */ public boolean hasHbaseVersion() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional .HBaseVersionFileContent hbaseVersion = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent getHbaseVersion() { if (hbaseVersionBuilder_ == null) { return hbaseVersion_; @@ -3108,6 +3971,9 @@ public final class ClusterStatusProtos { return hbaseVersionBuilder_.getMessage(); } } + /** + * optional .HBaseVersionFileContent hbaseVersion = 1; + */ public Builder setHbaseVersion(org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent value) { if (hbaseVersionBuilder_ == null) { if (value == null) { @@ -3121,6 +3987,9 @@ public final class ClusterStatusProtos { bitField0_ |= 0x00000001; return this; } + /** + * optional .HBaseVersionFileContent hbaseVersion = 1; + */ public Builder setHbaseVersion( org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.Builder builderForValue) { if (hbaseVersionBuilder_ == null) { @@ -3132,6 +4001,9 @@ public final class ClusterStatusProtos { bitField0_ |= 0x00000001; return this; } + /** + * 
optional .HBaseVersionFileContent hbaseVersion = 1; + */ public Builder mergeHbaseVersion(org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent value) { if (hbaseVersionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -3148,6 +4020,9 @@ public final class ClusterStatusProtos { bitField0_ |= 0x00000001; return this; } + /** + * optional .HBaseVersionFileContent hbaseVersion = 1; + */ public Builder clearHbaseVersion() { if (hbaseVersionBuilder_ == null) { hbaseVersion_ = org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.getDefaultInstance(); @@ -3158,11 +4033,17 @@ public final class ClusterStatusProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * optional .HBaseVersionFileContent hbaseVersion = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.Builder getHbaseVersionBuilder() { bitField0_ |= 0x00000001; onChanged(); return getHbaseVersionFieldBuilder().getBuilder(); } + /** + * optional .HBaseVersionFileContent hbaseVersion = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContentOrBuilder getHbaseVersionOrBuilder() { if (hbaseVersionBuilder_ != null) { return hbaseVersionBuilder_.getMessageOrBuilder(); @@ -3170,6 +4051,9 @@ public final class ClusterStatusProtos { return hbaseVersion_; } } + /** + * optional .HBaseVersionFileContent hbaseVersion = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent, org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.Builder, org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContentOrBuilder> getHbaseVersionFieldBuilder() { @@ -3183,7 +4067,7 @@ public final class ClusterStatusProtos { } return hbaseVersionBuilder_; } - + // repeated .LiveServerInfo liveServers = 2; private java.util.List liveServers_ = java.util.Collections.emptyList(); @@ -3193,10 +4077,13 @@ public final class ClusterStatusProtos { bitField0_ |= 0x00000002; } } - + private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfoOrBuilder> liveServersBuilder_; - + + /** + * repeated .LiveServerInfo liveServers = 2; + */ public java.util.List getLiveServersList() { if (liveServersBuilder_ == null) { return java.util.Collections.unmodifiableList(liveServers_); @@ -3204,6 +4091,9 @@ public final class ClusterStatusProtos { return liveServersBuilder_.getMessageList(); } } + /** + * repeated .LiveServerInfo liveServers = 2; + */ public int getLiveServersCount() { if (liveServersBuilder_ == null) { return liveServers_.size(); @@ -3211,6 +4101,9 @@ public final class ClusterStatusProtos { return liveServersBuilder_.getCount(); } } + /** + * repeated .LiveServerInfo liveServers = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo getLiveServers(int index) { if (liveServersBuilder_ == null) { return liveServers_.get(index); @@ -3218,6 +4111,9 @@ public final class ClusterStatusProtos { return liveServersBuilder_.getMessage(index); } } + /** + * repeated .LiveServerInfo liveServers = 2; + */ public Builder setLiveServers( int index, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo value) { if (liveServersBuilder_ == 
null) { @@ -3232,6 +4128,9 @@ public final class ClusterStatusProtos { } return this; } + /** + * repeated .LiveServerInfo liveServers = 2; + */ public Builder setLiveServers( int index, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder builderForValue) { if (liveServersBuilder_ == null) { @@ -3243,6 +4142,9 @@ public final class ClusterStatusProtos { } return this; } + /** + * repeated .LiveServerInfo liveServers = 2; + */ public Builder addLiveServers(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo value) { if (liveServersBuilder_ == null) { if (value == null) { @@ -3256,6 +4158,9 @@ public final class ClusterStatusProtos { } return this; } + /** + * repeated .LiveServerInfo liveServers = 2; + */ public Builder addLiveServers( int index, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo value) { if (liveServersBuilder_ == null) { @@ -3270,6 +4175,9 @@ public final class ClusterStatusProtos { } return this; } + /** + * repeated .LiveServerInfo liveServers = 2; + */ public Builder addLiveServers( org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder builderForValue) { if (liveServersBuilder_ == null) { @@ -3281,6 +4189,9 @@ public final class ClusterStatusProtos { } return this; } + /** + * repeated .LiveServerInfo liveServers = 2; + */ public Builder addLiveServers( int index, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder builderForValue) { if (liveServersBuilder_ == null) { @@ -3292,6 +4203,9 @@ public final class ClusterStatusProtos { } return this; } + /** + * repeated .LiveServerInfo liveServers = 2; + */ public Builder addAllLiveServers( java.lang.Iterable values) { if (liveServersBuilder_ == null) { @@ -3303,6 +4217,9 @@ public final class ClusterStatusProtos { } return this; } + /** + * repeated .LiveServerInfo liveServers = 2; + */ public Builder clearLiveServers() { if (liveServersBuilder_ == null) { liveServers_ = java.util.Collections.emptyList(); @@ -3313,6 +4230,9 @@ public final class ClusterStatusProtos { } return this; } + /** + * repeated .LiveServerInfo liveServers = 2; + */ public Builder removeLiveServers(int index) { if (liveServersBuilder_ == null) { ensureLiveServersIsMutable(); @@ -3323,10 +4243,16 @@ public final class ClusterStatusProtos { } return this; } + /** + * repeated .LiveServerInfo liveServers = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder getLiveServersBuilder( int index) { return getLiveServersFieldBuilder().getBuilder(index); } + /** + * repeated .LiveServerInfo liveServers = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfoOrBuilder getLiveServersOrBuilder( int index) { if (liveServersBuilder_ == null) { @@ -3334,6 +4260,9 @@ public final class ClusterStatusProtos { return liveServersBuilder_.getMessageOrBuilder(index); } } + /** + * repeated .LiveServerInfo liveServers = 2; + */ public java.util.List getLiveServersOrBuilderList() { if (liveServersBuilder_ != null) { @@ -3342,15 +4271,24 @@ public final class ClusterStatusProtos { return java.util.Collections.unmodifiableList(liveServers_); } } + /** + * repeated .LiveServerInfo liveServers = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder addLiveServersBuilder() { return getLiveServersFieldBuilder().addBuilder( 
org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.getDefaultInstance()); } + /** + * repeated .LiveServerInfo liveServers = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder addLiveServersBuilder( int index) { return getLiveServersFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.getDefaultInstance()); } + /** + * repeated .LiveServerInfo liveServers = 2; + */ public java.util.List getLiveServersBuilderList() { return getLiveServersFieldBuilder().getBuilderList(); @@ -3369,7 +4307,7 @@ public final class ClusterStatusProtos { } return liveServersBuilder_; } - + // repeated .ServerName deadServers = 3; private java.util.List deadServers_ = java.util.Collections.emptyList(); @@ -3379,10 +4317,13 @@ public final class ClusterStatusProtos { bitField0_ |= 0x00000004; } } - + private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> deadServersBuilder_; - + + /** + * repeated .ServerName deadServers = 3; + */ public java.util.List getDeadServersList() { if (deadServersBuilder_ == null) { return java.util.Collections.unmodifiableList(deadServers_); @@ -3390,6 +4331,9 @@ public final class ClusterStatusProtos { return deadServersBuilder_.getMessageList(); } } + /** + * repeated .ServerName deadServers = 3; + */ public int getDeadServersCount() { if (deadServersBuilder_ == null) { return deadServers_.size(); @@ -3397,6 +4341,9 @@ public final class ClusterStatusProtos { return deadServersBuilder_.getCount(); } } + /** + * repeated .ServerName deadServers = 3; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDeadServers(int index) { if (deadServersBuilder_ == null) { return deadServers_.get(index); @@ -3404,6 +4351,9 @@ public final class ClusterStatusProtos { return deadServersBuilder_.getMessage(index); } } + /** + * repeated .ServerName deadServers = 3; + */ public Builder setDeadServers( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { if (deadServersBuilder_ == null) { @@ -3418,6 +4368,9 @@ public final class ClusterStatusProtos { } return this; } + /** + * repeated .ServerName deadServers = 3; + */ public Builder setDeadServers( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { if (deadServersBuilder_ == null) { @@ -3429,6 +4382,9 @@ public final class ClusterStatusProtos { } return this; } + /** + * repeated .ServerName deadServers = 3; + */ public Builder addDeadServers(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { if (deadServersBuilder_ == null) { if (value == null) { @@ -3442,6 +4398,9 @@ public final class ClusterStatusProtos { } return this; } + /** + * repeated .ServerName deadServers = 3; + */ public Builder addDeadServers( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { if (deadServersBuilder_ == null) { @@ -3456,6 +4415,9 @@ public final class ClusterStatusProtos { } return this; } + /** + * repeated .ServerName deadServers = 3; + */ public Builder addDeadServers( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { if (deadServersBuilder_ == null) { @@ -3467,6 +4429,9 @@ public final class ClusterStatusProtos { 
} return this; } + /** + * repeated .ServerName deadServers = 3; + */ public Builder addDeadServers( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { if (deadServersBuilder_ == null) { @@ -3478,6 +4443,9 @@ public final class ClusterStatusProtos { } return this; } + /** + * repeated .ServerName deadServers = 3; + */ public Builder addAllDeadServers( java.lang.Iterable values) { if (deadServersBuilder_ == null) { @@ -3489,6 +4457,9 @@ public final class ClusterStatusProtos { } return this; } + /** + * repeated .ServerName deadServers = 3; + */ public Builder clearDeadServers() { if (deadServersBuilder_ == null) { deadServers_ = java.util.Collections.emptyList(); @@ -3499,6 +4470,9 @@ public final class ClusterStatusProtos { } return this; } + /** + * repeated .ServerName deadServers = 3; + */ public Builder removeDeadServers(int index) { if (deadServersBuilder_ == null) { ensureDeadServersIsMutable(); @@ -3509,10 +4483,16 @@ public final class ClusterStatusProtos { } return this; } + /** + * repeated .ServerName deadServers = 3; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getDeadServersBuilder( int index) { return getDeadServersFieldBuilder().getBuilder(index); } + /** + * repeated .ServerName deadServers = 3; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDeadServersOrBuilder( int index) { if (deadServersBuilder_ == null) { @@ -3520,6 +4500,9 @@ public final class ClusterStatusProtos { return deadServersBuilder_.getMessageOrBuilder(index); } } + /** + * repeated .ServerName deadServers = 3; + */ public java.util.List getDeadServersOrBuilderList() { if (deadServersBuilder_ != null) { @@ -3528,15 +4511,24 @@ public final class ClusterStatusProtos { return java.util.Collections.unmodifiableList(deadServers_); } } + /** + * repeated .ServerName deadServers = 3; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder addDeadServersBuilder() { return getDeadServersFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()); } + /** + * repeated .ServerName deadServers = 3; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder addDeadServersBuilder( int index) { return getDeadServersFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()); } + /** + * repeated .ServerName deadServers = 3; + */ public java.util.List getDeadServersBuilderList() { return getDeadServersFieldBuilder().getBuilderList(); @@ -3555,7 +4547,7 @@ public final class ClusterStatusProtos { } return deadServersBuilder_; } - + // repeated .RegionInTransition regionsInTransition = 4; private java.util.List regionsInTransition_ = java.util.Collections.emptyList(); @@ -3565,10 +4557,13 @@ public final class ClusterStatusProtos { bitField0_ |= 0x00000008; } } - + private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransitionOrBuilder> regionsInTransitionBuilder_; - + + /** + * repeated .RegionInTransition regionsInTransition = 4; + */ public java.util.List getRegionsInTransitionList() { if (regionsInTransitionBuilder_ == null) { return 
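/*
 * The repeated-field accessors in this Builder all follow the same lazy pattern seen
 * here: while regionsInTransitionBuilder_ is null the Builder operates directly on the
 * backing list, and only the first call to a nested-builder accessor materializes a
 * RepeatedFieldBuilder. Callers that never ask for sub-builders therefore avoid that
 * allocation entirely.
 */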
java.util.Collections.unmodifiableList(regionsInTransition_); @@ -3576,6 +4571,9 @@ public final class ClusterStatusProtos { return regionsInTransitionBuilder_.getMessageList(); } } + /** + * repeated .RegionInTransition regionsInTransition = 4; + */ public int getRegionsInTransitionCount() { if (regionsInTransitionBuilder_ == null) { return regionsInTransition_.size(); @@ -3583,6 +4581,9 @@ public final class ClusterStatusProtos { return regionsInTransitionBuilder_.getCount(); } } + /** + * repeated .RegionInTransition regionsInTransition = 4; + */ public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition getRegionsInTransition(int index) { if (regionsInTransitionBuilder_ == null) { return regionsInTransition_.get(index); @@ -3590,6 +4591,9 @@ public final class ClusterStatusProtos { return regionsInTransitionBuilder_.getMessage(index); } } + /** + * repeated .RegionInTransition regionsInTransition = 4; + */ public Builder setRegionsInTransition( int index, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition value) { if (regionsInTransitionBuilder_ == null) { @@ -3604,6 +4608,9 @@ public final class ClusterStatusProtos { } return this; } + /** + * repeated .RegionInTransition regionsInTransition = 4; + */ public Builder setRegionsInTransition( int index, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder builderForValue) { if (regionsInTransitionBuilder_ == null) { @@ -3615,6 +4622,9 @@ public final class ClusterStatusProtos { } return this; } + /** + * repeated .RegionInTransition regionsInTransition = 4; + */ public Builder addRegionsInTransition(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition value) { if (regionsInTransitionBuilder_ == null) { if (value == null) { @@ -3628,6 +4638,9 @@ public final class ClusterStatusProtos { } return this; } + /** + * repeated .RegionInTransition regionsInTransition = 4; + */ public Builder addRegionsInTransition( int index, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition value) { if (regionsInTransitionBuilder_ == null) { @@ -3642,6 +4655,9 @@ public final class ClusterStatusProtos { } return this; } + /** + * repeated .RegionInTransition regionsInTransition = 4; + */ public Builder addRegionsInTransition( org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder builderForValue) { if (regionsInTransitionBuilder_ == null) { @@ -3653,6 +4669,9 @@ public final class ClusterStatusProtos { } return this; } + /** + * repeated .RegionInTransition regionsInTransition = 4; + */ public Builder addRegionsInTransition( int index, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder builderForValue) { if (regionsInTransitionBuilder_ == null) { @@ -3664,6 +4683,9 @@ public final class ClusterStatusProtos { } return this; } + /** + * repeated .RegionInTransition regionsInTransition = 4; + */ public Builder addAllRegionsInTransition( java.lang.Iterable values) { if (regionsInTransitionBuilder_ == null) { @@ -3675,6 +4697,9 @@ public final class ClusterStatusProtos { } return this; } + /** + * repeated .RegionInTransition regionsInTransition = 4; + */ public Builder clearRegionsInTransition() { if (regionsInTransitionBuilder_ == null) { regionsInTransition_ = java.util.Collections.emptyList(); @@ -3685,6 +4710,9 @@ public final class ClusterStatusProtos { } return this; } + /** + * repeated .RegionInTransition regionsInTransition = 
4; + */ public Builder removeRegionsInTransition(int index) { if (regionsInTransitionBuilder_ == null) { ensureRegionsInTransitionIsMutable(); @@ -3695,10 +4723,16 @@ public final class ClusterStatusProtos { } return this; } + /** + * repeated .RegionInTransition regionsInTransition = 4; + */ public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder getRegionsInTransitionBuilder( int index) { return getRegionsInTransitionFieldBuilder().getBuilder(index); } + /** + * repeated .RegionInTransition regionsInTransition = 4; + */ public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransitionOrBuilder getRegionsInTransitionOrBuilder( int index) { if (regionsInTransitionBuilder_ == null) { @@ -3706,6 +4740,9 @@ public final class ClusterStatusProtos { return regionsInTransitionBuilder_.getMessageOrBuilder(index); } } + /** + * repeated .RegionInTransition regionsInTransition = 4; + */ public java.util.List getRegionsInTransitionOrBuilderList() { if (regionsInTransitionBuilder_ != null) { @@ -3714,15 +4751,24 @@ public final class ClusterStatusProtos { return java.util.Collections.unmodifiableList(regionsInTransition_); } } + /** + * repeated .RegionInTransition regionsInTransition = 4; + */ public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder addRegionsInTransitionBuilder() { return getRegionsInTransitionFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.getDefaultInstance()); } + /** + * repeated .RegionInTransition regionsInTransition = 4; + */ public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder addRegionsInTransitionBuilder( int index) { return getRegionsInTransitionFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.getDefaultInstance()); } + /** + * repeated .RegionInTransition regionsInTransition = 4; + */ public java.util.List getRegionsInTransitionBuilderList() { return getRegionsInTransitionFieldBuilder().getBuilderList(); @@ -3741,14 +4787,20 @@ public final class ClusterStatusProtos { } return regionsInTransitionBuilder_; } - + // optional .ClusterId clusterId = 5; private org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId clusterId_ = org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId, org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterIdOrBuilder> clusterIdBuilder_; + /** + * optional .ClusterId clusterId = 5; + */ public boolean hasClusterId() { return ((bitField0_ & 0x00000010) == 0x00000010); } + /** + * optional .ClusterId clusterId = 5; + */ public org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId getClusterId() { if (clusterIdBuilder_ == null) { return clusterId_; @@ -3756,6 +4808,9 @@ public final class ClusterStatusProtos { return clusterIdBuilder_.getMessage(); } } + /** + * optional .ClusterId clusterId = 5; + */ public Builder setClusterId(org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId value) { if (clusterIdBuilder_ == null) { if (value == null) { @@ -3769,6 +4824,9 @@ public final class ClusterStatusProtos { bitField0_ |= 0x00000010; return this; } + /** + * optional .ClusterId clusterId = 5; + */ 
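/*
 * Optional message fields use merge semantics rather than plain assignment:
 * mergeClusterId(value) below folds `value` into any ClusterId already present, and only
 * falls back to a straight set when the field is unset or still holds the default
 * instance. setClusterId(value) replaces the field outright.
 */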
public Builder setClusterId( org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.Builder builderForValue) { if (clusterIdBuilder_ == null) { @@ -3780,6 +4838,9 @@ public final class ClusterStatusProtos { bitField0_ |= 0x00000010; return this; } + /** + * optional .ClusterId clusterId = 5; + */ public Builder mergeClusterId(org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId value) { if (clusterIdBuilder_ == null) { if (((bitField0_ & 0x00000010) == 0x00000010) && @@ -3796,6 +4857,9 @@ public final class ClusterStatusProtos { bitField0_ |= 0x00000010; return this; } + /** + * optional .ClusterId clusterId = 5; + */ public Builder clearClusterId() { if (clusterIdBuilder_ == null) { clusterId_ = org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.getDefaultInstance(); @@ -3806,11 +4870,17 @@ public final class ClusterStatusProtos { bitField0_ = (bitField0_ & ~0x00000010); return this; } + /** + * optional .ClusterId clusterId = 5; + */ public org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.Builder getClusterIdBuilder() { bitField0_ |= 0x00000010; onChanged(); return getClusterIdFieldBuilder().getBuilder(); } + /** + * optional .ClusterId clusterId = 5; + */ public org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterIdOrBuilder getClusterIdOrBuilder() { if (clusterIdBuilder_ != null) { return clusterIdBuilder_.getMessageOrBuilder(); @@ -3818,6 +4888,9 @@ public final class ClusterStatusProtos { return clusterId_; } } + /** + * optional .ClusterId clusterId = 5; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId, org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterIdOrBuilder> getClusterIdFieldBuilder() { @@ -3831,7 +4904,7 @@ public final class ClusterStatusProtos { } return clusterIdBuilder_; } - + // repeated .Coprocessor masterCoprocessors = 6; private java.util.List masterCoprocessors_ = java.util.Collections.emptyList(); @@ -3841,10 +4914,13 @@ public final class ClusterStatusProtos { bitField0_ |= 0x00000020; } } - + private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder> masterCoprocessorsBuilder_; - + + /** + * repeated .Coprocessor masterCoprocessors = 6; + */ public java.util.List getMasterCoprocessorsList() { if (masterCoprocessorsBuilder_ == null) { return java.util.Collections.unmodifiableList(masterCoprocessors_); @@ -3852,6 +4928,9 @@ public final class ClusterStatusProtos { return masterCoprocessorsBuilder_.getMessageList(); } } + /** + * repeated .Coprocessor masterCoprocessors = 6; + */ public int getMasterCoprocessorsCount() { if (masterCoprocessorsBuilder_ == null) { return masterCoprocessors_.size(); @@ -3859,6 +4938,9 @@ public final class ClusterStatusProtos { return masterCoprocessorsBuilder_.getCount(); } } + /** + * repeated .Coprocessor masterCoprocessors = 6; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor getMasterCoprocessors(int index) { if (masterCoprocessorsBuilder_ == null) { return masterCoprocessors_.get(index); @@ -3866,6 +4948,9 @@ public final class ClusterStatusProtos { return masterCoprocessorsBuilder_.getMessage(index); } } + /** + * repeated .Coprocessor 
masterCoprocessors = 6; + */ public Builder setMasterCoprocessors( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor value) { if (masterCoprocessorsBuilder_ == null) { @@ -3880,6 +4965,9 @@ public final class ClusterStatusProtos { } return this; } + /** + * repeated .Coprocessor masterCoprocessors = 6; + */ public Builder setMasterCoprocessors( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder builderForValue) { if (masterCoprocessorsBuilder_ == null) { @@ -3891,6 +4979,9 @@ public final class ClusterStatusProtos { } return this; } + /** + * repeated .Coprocessor masterCoprocessors = 6; + */ public Builder addMasterCoprocessors(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor value) { if (masterCoprocessorsBuilder_ == null) { if (value == null) { @@ -3904,6 +4995,9 @@ public final class ClusterStatusProtos { } return this; } + /** + * repeated .Coprocessor masterCoprocessors = 6; + */ public Builder addMasterCoprocessors( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor value) { if (masterCoprocessorsBuilder_ == null) { @@ -3918,6 +5012,9 @@ public final class ClusterStatusProtos { } return this; } + /** + * repeated .Coprocessor masterCoprocessors = 6; + */ public Builder addMasterCoprocessors( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder builderForValue) { if (masterCoprocessorsBuilder_ == null) { @@ -3929,6 +5026,9 @@ public final class ClusterStatusProtos { } return this; } + /** + * repeated .Coprocessor masterCoprocessors = 6; + */ public Builder addMasterCoprocessors( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder builderForValue) { if (masterCoprocessorsBuilder_ == null) { @@ -3940,6 +5040,9 @@ public final class ClusterStatusProtos { } return this; } + /** + * repeated .Coprocessor masterCoprocessors = 6; + */ public Builder addAllMasterCoprocessors( java.lang.Iterable values) { if (masterCoprocessorsBuilder_ == null) { @@ -3951,6 +5054,9 @@ public final class ClusterStatusProtos { } return this; } + /** + * repeated .Coprocessor masterCoprocessors = 6; + */ public Builder clearMasterCoprocessors() { if (masterCoprocessorsBuilder_ == null) { masterCoprocessors_ = java.util.Collections.emptyList(); @@ -3961,6 +5067,9 @@ public final class ClusterStatusProtos { } return this; } + /** + * repeated .Coprocessor masterCoprocessors = 6; + */ public Builder removeMasterCoprocessors(int index) { if (masterCoprocessorsBuilder_ == null) { ensureMasterCoprocessorsIsMutable(); @@ -3971,10 +5080,16 @@ public final class ClusterStatusProtos { } return this; } + /** + * repeated .Coprocessor masterCoprocessors = 6; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder getMasterCoprocessorsBuilder( int index) { return getMasterCoprocessorsFieldBuilder().getBuilder(index); } + /** + * repeated .Coprocessor masterCoprocessors = 6; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder getMasterCoprocessorsOrBuilder( int index) { if (masterCoprocessorsBuilder_ == null) { @@ -3982,6 +5097,9 @@ public final class ClusterStatusProtos { return masterCoprocessorsBuilder_.getMessageOrBuilder(index); } } + /** + * repeated .Coprocessor masterCoprocessors = 6; + */ public java.util.List getMasterCoprocessorsOrBuilderList() { if (masterCoprocessorsBuilder_ != null) { @@ -3990,15 +5108,24 @@ public final class ClusterStatusProtos { return 
java.util.Collections.unmodifiableList(masterCoprocessors_); } } + /** + * repeated .Coprocessor masterCoprocessors = 6; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder addMasterCoprocessorsBuilder() { return getMasterCoprocessorsFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.getDefaultInstance()); } + /** + * repeated .Coprocessor masterCoprocessors = 6; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder addMasterCoprocessorsBuilder( int index) { return getMasterCoprocessorsFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.getDefaultInstance()); } + /** + * repeated .Coprocessor masterCoprocessors = 6; + */ public java.util.List getMasterCoprocessorsBuilderList() { return getMasterCoprocessorsFieldBuilder().getBuilderList(); @@ -4017,14 +5144,20 @@ public final class ClusterStatusProtos { } return masterCoprocessorsBuilder_; } - + // optional .ServerName master = 7; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName master_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> masterBuilder_; + /** + * optional .ServerName master = 7; + */ public boolean hasMaster() { return ((bitField0_ & 0x00000040) == 0x00000040); } + /** + * optional .ServerName master = 7; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getMaster() { if (masterBuilder_ == null) { return master_; @@ -4032,6 +5165,9 @@ public final class ClusterStatusProtos { return masterBuilder_.getMessage(); } } + /** + * optional .ServerName master = 7; + */ public Builder setMaster(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { if (masterBuilder_ == null) { if (value == null) { @@ -4045,6 +5181,9 @@ public final class ClusterStatusProtos { bitField0_ |= 0x00000040; return this; } + /** + * optional .ServerName master = 7; + */ public Builder setMaster( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { if (masterBuilder_ == null) { @@ -4056,6 +5195,9 @@ public final class ClusterStatusProtos { bitField0_ |= 0x00000040; return this; } + /** + * optional .ServerName master = 7; + */ public Builder mergeMaster(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { if (masterBuilder_ == null) { if (((bitField0_ & 0x00000040) == 0x00000040) && @@ -4072,6 +5214,9 @@ public final class ClusterStatusProtos { bitField0_ |= 0x00000040; return this; } + /** + * optional .ServerName master = 7; + */ public Builder clearMaster() { if (masterBuilder_ == null) { master_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); @@ -4082,11 +5227,17 @@ public final class ClusterStatusProtos { bitField0_ = (bitField0_ & ~0x00000040); return this; } + /** + * optional .ServerName master = 7; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getMasterBuilder() { bitField0_ |= 0x00000040; onChanged(); return getMasterFieldBuilder().getBuilder(); } + /** + * optional .ServerName master = 7; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder 
getMasterOrBuilder() { if (masterBuilder_ != null) { return masterBuilder_.getMessageOrBuilder(); @@ -4094,6 +5245,9 @@ public final class ClusterStatusProtos { return master_; } } + /** + * optional .ServerName master = 7; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getMasterFieldBuilder() { @@ -4107,7 +5261,7 @@ public final class ClusterStatusProtos { } return masterBuilder_; } - + // repeated .ServerName backupMasters = 8; private java.util.List backupMasters_ = java.util.Collections.emptyList(); @@ -4117,10 +5271,13 @@ public final class ClusterStatusProtos { bitField0_ |= 0x00000080; } } - + private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> backupMastersBuilder_; - + + /** + * repeated .ServerName backupMasters = 8; + */ public java.util.List getBackupMastersList() { if (backupMastersBuilder_ == null) { return java.util.Collections.unmodifiableList(backupMasters_); @@ -4128,6 +5285,9 @@ public final class ClusterStatusProtos { return backupMastersBuilder_.getMessageList(); } } + /** + * repeated .ServerName backupMasters = 8; + */ public int getBackupMastersCount() { if (backupMastersBuilder_ == null) { return backupMasters_.size(); @@ -4135,6 +5295,9 @@ public final class ClusterStatusProtos { return backupMastersBuilder_.getCount(); } } + /** + * repeated .ServerName backupMasters = 8; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getBackupMasters(int index) { if (backupMastersBuilder_ == null) { return backupMasters_.get(index); @@ -4142,6 +5305,9 @@ public final class ClusterStatusProtos { return backupMastersBuilder_.getMessage(index); } } + /** + * repeated .ServerName backupMasters = 8; + */ public Builder setBackupMasters( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { if (backupMastersBuilder_ == null) { @@ -4156,6 +5322,9 @@ public final class ClusterStatusProtos { } return this; } + /** + * repeated .ServerName backupMasters = 8; + */ public Builder setBackupMasters( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { if (backupMastersBuilder_ == null) { @@ -4167,6 +5336,9 @@ public final class ClusterStatusProtos { } return this; } + /** + * repeated .ServerName backupMasters = 8; + */ public Builder addBackupMasters(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { if (backupMastersBuilder_ == null) { if (value == null) { @@ -4180,6 +5352,9 @@ public final class ClusterStatusProtos { } return this; } + /** + * repeated .ServerName backupMasters = 8; + */ public Builder addBackupMasters( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { if (backupMastersBuilder_ == null) { @@ -4194,6 +5369,9 @@ public final class ClusterStatusProtos { } return this; } + /** + * repeated .ServerName backupMasters = 8; + */ public Builder addBackupMasters( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { if (backupMastersBuilder_ == null) { @@ -4205,6 +5383,9 @@ public final class ClusterStatusProtos { } return this; } + /** + * 
repeated .ServerName backupMasters = 8; + */ public Builder addBackupMasters( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { if (backupMastersBuilder_ == null) { @@ -4216,6 +5397,9 @@ public final class ClusterStatusProtos { } return this; } + /** + * repeated .ServerName backupMasters = 8; + */ public Builder addAllBackupMasters( java.lang.Iterable values) { if (backupMastersBuilder_ == null) { @@ -4227,6 +5411,9 @@ public final class ClusterStatusProtos { } return this; } + /** + * repeated .ServerName backupMasters = 8; + */ public Builder clearBackupMasters() { if (backupMastersBuilder_ == null) { backupMasters_ = java.util.Collections.emptyList(); @@ -4237,6 +5424,9 @@ public final class ClusterStatusProtos { } return this; } + /** + * repeated .ServerName backupMasters = 8; + */ public Builder removeBackupMasters(int index) { if (backupMastersBuilder_ == null) { ensureBackupMastersIsMutable(); @@ -4247,10 +5437,16 @@ public final class ClusterStatusProtos { } return this; } + /** + * repeated .ServerName backupMasters = 8; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getBackupMastersBuilder( int index) { return getBackupMastersFieldBuilder().getBuilder(index); } + /** + * repeated .ServerName backupMasters = 8; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getBackupMastersOrBuilder( int index) { if (backupMastersBuilder_ == null) { @@ -4258,6 +5454,9 @@ public final class ClusterStatusProtos { return backupMastersBuilder_.getMessageOrBuilder(index); } } + /** + * repeated .ServerName backupMasters = 8; + */ public java.util.List getBackupMastersOrBuilderList() { if (backupMastersBuilder_ != null) { @@ -4266,15 +5465,24 @@ public final class ClusterStatusProtos { return java.util.Collections.unmodifiableList(backupMasters_); } } + /** + * repeated .ServerName backupMasters = 8; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder addBackupMastersBuilder() { return getBackupMastersFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()); } + /** + * repeated .ServerName backupMasters = 8; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder addBackupMastersBuilder( int index) { return getBackupMastersFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()); } + /** + * repeated .ServerName backupMasters = 8; + */ public java.util.List getBackupMastersBuilderList() { return getBackupMastersFieldBuilder().getBuilderList(); @@ -4293,39 +5501,51 @@ public final class ClusterStatusProtos { } return backupMastersBuilder_; } - + // optional bool balancerOn = 9; private boolean balancerOn_ ; + /** + * optional bool balancerOn = 9; + */ public boolean hasBalancerOn() { return ((bitField0_ & 0x00000100) == 0x00000100); } + /** + * optional bool balancerOn = 9; + */ public boolean getBalancerOn() { return balancerOn_; } + /** + * optional bool balancerOn = 9; + */ public Builder setBalancerOn(boolean value) { bitField0_ |= 0x00000100; balancerOn_ = value; onChanged(); return this; } + /** + * optional bool balancerOn = 9; + */ public Builder clearBalancerOn() { bitField0_ = (bitField0_ & ~0x00000100); balancerOn_ = false; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:ClusterStatus) } - + static { defaultInstance = new ClusterStatus(true); 
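/*
 * The descriptor wiring below also changes shape in this regeneration: the
 * FieldAccessorTable constructor no longer receives the message and builder classes.
 * That binding now happens lazily through ensureFieldAccessorsInitialized() in each
 * internalGetFieldAccessorTable(), deferring the reflection work until a message type
 * is first used.
 */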
defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:ClusterStatus) } - + private static com.google.protobuf.Descriptors.Descriptor internal_static_RegionState_descriptor; private static @@ -4346,7 +5566,7 @@ public final class ClusterStatusProtos { private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_ClusterStatus_fieldAccessorTable; - + public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; @@ -4389,33 +5609,25 @@ public final class ClusterStatusProtos { internal_static_RegionState_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_RegionState_descriptor, - new java.lang.String[] { "RegionInfo", "State", "Stamp", }, - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.class, - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.Builder.class); + new java.lang.String[] { "RegionInfo", "State", "Stamp", }); internal_static_RegionInTransition_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_RegionInTransition_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_RegionInTransition_descriptor, - new java.lang.String[] { "Spec", "RegionState", }, - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.class, - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder.class); + new java.lang.String[] { "Spec", "RegionState", }); internal_static_LiveServerInfo_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_LiveServerInfo_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_LiveServerInfo_descriptor, - new java.lang.String[] { "Server", "ServerLoad", }, - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.class, - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder.class); + new java.lang.String[] { "Server", "ServerLoad", }); internal_static_ClusterStatus_descriptor = getDescriptor().getMessageTypes().get(3); internal_static_ClusterStatus_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ClusterStatus_descriptor, - new java.lang.String[] { "HbaseVersion", "LiveServers", "DeadServers", "RegionsInTransition", "ClusterId", "MasterCoprocessors", "Master", "BackupMasters", "BalancerOn", }, - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.class, - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.Builder.class); + new java.lang.String[] { "HbaseVersion", "LiveServers", "DeadServers", "RegionsInTransition", "ClusterId", "MasterCoprocessors", "Master", "BackupMasters", "BalancerOn", }); return null; } }; @@ -4427,6 +5639,6 @@ public final class ClusterStatusProtos { org.apache.hadoop.hbase.protobuf.generated.FSProtos.getDescriptor(), }, assigner); } - + // @@protoc_insertion_point(outer_class_scope) } diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ComparatorProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ComparatorProtos.java index 983b57e..267a97f 100644 --- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ComparatorProtos.java +++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ComparatorProtos.java @@ -10,86 +10,192 @@ public 
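A note on the ClusterStatusProtos changes above: the static descriptor assigner no longer passes the message and builder classes to the FieldAccessorTable constructors; under the protobuf 2.5 codegen they are supplied lazily through ensureFieldAccessorsInitialized() the first time internalGetFieldAccessorTable() runs. Call sites are unaffected. A minimal round-trip sketch, assuming ClusterStatus picks up the same regenerated parseFrom()/PARSER surface as the other messages in this diff:

    import org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus;

    public final class ClusterStatusRoundTrip {
      public static void main(String[] args) throws Exception {
        // Builders are unchanged by the regeneration.
        ClusterStatus status = ClusterStatus.newBuilder()
            .setBalancerOn(true)             // optional bool balancerOn = 9
            .build();
        byte[] wire = status.toByteArray();
        // parseFrom() now delegates to the static PARSER internally,
        // but the call looks exactly as it did before.
        ClusterStatus copy = ClusterStatus.parseFrom(wire);
        System.out.println(copy.getBalancerOn());   // true
      }
    }
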
final class ComparatorProtos { } public interface ComparatorOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required string name = 1; + /** + * required string name = 1; + */ boolean hasName(); - String getName(); - + /** + * required string name = 1; + */ + java.lang.String getName(); + /** + * required string name = 1; + */ + com.google.protobuf.ByteString + getNameBytes(); + // optional bytes serializedComparator = 2; + /** + * optional bytes serializedComparator = 2; + */ boolean hasSerializedComparator(); + /** + * optional bytes serializedComparator = 2; + */ com.google.protobuf.ByteString getSerializedComparator(); } + /** + * Protobuf type {@code Comparator} + */ public static final class Comparator extends com.google.protobuf.GeneratedMessage implements ComparatorOrBuilder { // Use Comparator.newBuilder() to construct. - private Comparator(Builder builder) { + private Comparator(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private Comparator(boolean noInit) {} - + private Comparator(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final Comparator defaultInstance; public static Comparator getDefaultInstance() { return defaultInstance; } - + public Comparator getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private Comparator( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + name_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + serializedComparator_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_Comparator_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_Comparator_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_Comparator_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.class, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder.class); } - + + public static com.google.protobuf.Parser PARSER = + new 
com.google.protobuf.AbstractParser() { + public Comparator parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new Comparator(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + private int bitField0_; // required string name = 1; public static final int NAME_FIELD_NUMBER = 1; private java.lang.Object name_; + /** + * required string name = 1; + */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getName() { + /** + * required string name = 1; + */ + public java.lang.String getName() { java.lang.Object ref = name_; - if (ref instanceof String) { - return (String) ref; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { name_ = s; } return s; } } - private com.google.protobuf.ByteString getNameBytes() { + /** + * required string name = 1; + */ + public com.google.protobuf.ByteString + getNameBytes() { java.lang.Object ref = name_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + // optional bytes serializedComparator = 2; public static final int SERIALIZEDCOMPARATOR_FIELD_NUMBER = 2; private com.google.protobuf.ByteString serializedComparator_; + /** + * optional bytes serializedComparator = 2; + */ public boolean hasSerializedComparator() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional bytes serializedComparator = 2; + */ public com.google.protobuf.ByteString getSerializedComparator() { return serializedComparator_; } - + private void initFields() { name_ = ""; serializedComparator_ = com.google.protobuf.ByteString.EMPTY; @@ -98,7 +204,7 @@ public final class ComparatorProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasName()) { memoizedIsInitialized = 0; return false; @@ -106,7 +212,7 @@ public final class ComparatorProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -118,12 +224,12 @@ public final class ComparatorProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -137,14 +243,14 @@ public final class ComparatorProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -154,7 +260,7 @@ public final class ComparatorProtos 
{ return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator other = (org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator) obj; - + boolean result = true; result = result && (hasName() == other.hasName()); if (hasName()) { @@ -170,9 +276,13 @@ public final class ComparatorProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasName()) { @@ -184,89 +294,79 @@ public final class ComparatorProtos { hash = (53 * hash) + getSerializedComparator().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static 
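All of the static parseFrom()/parseDelimitedFrom() overloads above now delegate to PARSER instead of newBuilder().mergeFrom(...).buildParsed(). The old parseDelimitedFrom() returned null when mergeDelimitedFrom() hit end-of-stream; Parser.parseDelimitedFrom() in protobuf 2.5 keeps that contract, returning null on a cleanly exhausted stream, so read-until-null loops over length-delimited messages behave as before. A sketch:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.InputStream;
    import org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator;

    public final class DelimitedRead {
      public static void main(String[] args) throws Exception {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        Comparator.newBuilder().setName("first").build().writeDelimitedTo(out);
        Comparator.newBuilder().setName("second").build().writeDelimitedTo(out);

        InputStream in = new ByteArrayInputStream(out.toByteArray());
        // parseDelimitedFrom() returns null at EOF, before and after this change.
        for (Comparator c; (c = Comparator.parseDelimitedFrom(in)) != null; ) {
          System.out.println(c.getName());   // "first", then "second"
        }
      }
    }
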
org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code Comparator} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder { @@ -274,18 +374,21 @@ public final class ComparatorProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_Comparator_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_Comparator_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_Comparator_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.class, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -296,7 +399,7 @@ public final class ComparatorProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); name_ = ""; @@ -305,20 +408,20 @@ public final class ComparatorProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_Comparator_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator build() { org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator result = buildPartial(); if (!result.isInitialized()) { @@ -326,17 +429,7 @@ public final class 
ComparatorProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator result = new org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator(this); int from_bitField0_ = bitField0_; @@ -353,7 +446,7 @@ public final class ComparatorProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator)other); @@ -362,11 +455,13 @@ public final class ComparatorProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator other) { if (other == org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance()) return this; if (other.hasName()) { - setName(other.getName()); + bitField0_ |= 0x00000001; + name_ = other.name_; + onChanged(); } if (other.hasSerializedComparator()) { setSerializedComparator(other.getSerializedComparator()); @@ -374,7 +469,7 @@ public final class ComparatorProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasName()) { @@ -382,62 +477,69 @@ public final class ComparatorProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - name_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - serializedComparator_ = input.readBytes(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required string name = 1; private java.lang.Object name_ = ""; + /** + * required string name = 1; + */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getName() { + /** + * required string name = 1; + */ + public java.lang.String getName() { java.lang.Object ref = name_; - if (!(ref instanceof String)) { - String s = 
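The builder's mergeFrom(CodedInputStream) above no longer hand-rolls a tag-dispatch loop: it parses a whole message through PARSER.parsePartialFrom() and merges the result, and when parsing fails it still merges e.getUnfinishedMessage() in the finally block before rethrowing, so fields read before the error survive in the builder. The same unfinished message is reachable at call sites; a hedged sketch (the truncated payload is a made-up illustration):

    import com.google.protobuf.InvalidProtocolBufferException;
    import org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator;

    public final class PartialParse {
      public static void main(String[] args) {
        // Field 1 (name) declares 5 bytes but only 1 follows: a truncated message.
        byte[] truncated = { 0x0a, 0x05, 'h' };
        try {
          Comparator.PARSER.parseFrom(truncated);
        } catch (InvalidProtocolBufferException e) {
          // Whatever decoded before the failure, where protobuf attached it.
          Comparator partial = (Comparator) e.getUnfinishedMessage();
          if (partial != null) {
            System.out.println("parsed so far, hasName=" + partial.hasName());
          }
        }
      }
    }
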
((com.google.protobuf.ByteString) ref).toStringUtf8(); + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); name_ = s; return s; } else { - return (String) ref; + return (java.lang.String) ref; + } + } + /** + * required string name = 1; + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; } } - public Builder setName(String value) { + /** + * required string name = 1; + */ + public Builder setName( + java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ -446,26 +548,46 @@ public final class ComparatorProtos { onChanged(); return this; } + /** + * required string name = 1; + */ public Builder clearName() { bitField0_ = (bitField0_ & ~0x00000001); name_ = getDefaultInstance().getName(); onChanged(); return this; } - void setName(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; + /** + * required string name = 1; + */ + public Builder setNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; name_ = value; onChanged(); + return this; } - + // optional bytes serializedComparator = 2; private com.google.protobuf.ByteString serializedComparator_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes serializedComparator = 2; + */ public boolean hasSerializedComparator() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional bytes serializedComparator = 2; + */ public com.google.protobuf.ByteString getSerializedComparator() { return serializedComparator_; } + /** + * optional bytes serializedComparator = 2; + */ public Builder setSerializedComparator(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -475,70 +597,152 @@ public final class ComparatorProtos { onChanged(); return this; } + /** + * optional bytes serializedComparator = 2; + */ public Builder clearSerializedComparator() { bitField0_ = (bitField0_ & ~0x00000002); serializedComparator_ = getDefaultInstance().getSerializedComparator(); onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:Comparator) } - + static { defaultInstance = new Comparator(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:Comparator) } - + public interface ByteArrayComparableOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // optional bytes value = 1; + /** + * optional bytes value = 1; + */ boolean hasValue(); + /** + * optional bytes value = 1; + */ com.google.protobuf.ByteString getValue(); } + /** + * Protobuf type {@code ByteArrayComparable} + */ public static final class ByteArrayComparable extends com.google.protobuf.GeneratedMessage implements ByteArrayComparableOrBuilder { // Use ByteArrayComparable.newBuilder() to construct. 
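The regeneration also promotes the raw-bytes accessors for string fields into the public API: getNameBytes() returns (and caches) the UTF-8 encoding, and setNameBytes() replaces the old package-private void setName(ByteString). mergeFrom() likewise copies the other message's name_ reference directly rather than calling setName(other.getName()), avoiding a needless UTF-8 decode and re-encode. For example:

    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator;

    public final class NameBytes {
      public static void main(String[] args) {
        // Set the field without ever materializing a java.lang.String:
        Comparator c = Comparator.newBuilder()
            .setNameBytes(ByteString.copyFromUtf8("BinaryComparator"))
            .build();
        // First getName() decodes and caches; later calls reuse the String.
        System.out.println(c.getName());
      }
    }
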
- private ByteArrayComparable(Builder builder) { + private ByteArrayComparable(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private ByteArrayComparable(boolean noInit) {} - + private ByteArrayComparable(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final ByteArrayComparable defaultInstance; public static ByteArrayComparable getDefaultInstance() { return defaultInstance; } - + public ByteArrayComparable getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ByteArrayComparable( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + value_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_ByteArrayComparable_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_ByteArrayComparable_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_ByteArrayComparable_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.class, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder.class); } - + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public ByteArrayComparable parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ByteArrayComparable(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + private int bitField0_; // optional bytes value = 1; public static final int VALUE_FIELD_NUMBER = 1; private com.google.protobuf.ByteString value_; + /** + * optional bytes value = 1; + */ public boolean hasValue() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional bytes value = 1; + */ public com.google.protobuf.ByteString getValue() { return value_; } - + private void initFields() { value_ = 
com.google.protobuf.ByteString.EMPTY; } @@ -546,11 +750,11 @@ public final class ComparatorProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -559,12 +763,12 @@ public final class ComparatorProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -574,14 +778,14 @@ public final class ComparatorProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -591,7 +795,7 @@ public final class ComparatorProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable other = (org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable) obj; - + boolean result = true; result = result && (hasValue() == other.hasValue()); if (hasValue()) { @@ -602,9 +806,13 @@ public final class ComparatorProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasValue()) { @@ -612,89 +820,79 @@ public final class ComparatorProtos { hash = (53 * hash) + getValue().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable parseFrom(java.io.InputStream input) throws java.io.IOException { - return 
newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code ByteArrayComparable} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder { @@ -702,18 +900,21 @@ public final class ComparatorProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_ByteArrayComparable_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_ByteArrayComparable_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_ByteArrayComparable_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.class, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.newBuilder() private 
Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -724,27 +925,27 @@ public final class ComparatorProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); value_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_ByteArrayComparable_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable build() { org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable result = buildPartial(); if (!result.isInitialized()) { @@ -752,17 +953,7 @@ public final class ComparatorProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable result = new org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable(this); int from_bitField0_ = bitField0_; @@ -775,7 +966,7 @@ public final class ComparatorProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable)other); @@ -784,7 +975,7 @@ public final class ComparatorProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable other) { if (other == org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance()) return this; if (other.hasValue()) { @@ -793,53 +984,47 @@ public final class ComparatorProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, 
unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - value_ = input.readBytes(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // optional bytes value = 1; private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes value = 1; + */ public boolean hasValue() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional bytes value = 1; + */ public com.google.protobuf.ByteString getValue() { return value_; } + /** + * optional bytes value = 1; + */ public Builder setValue(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -849,74 +1034,170 @@ public final class ComparatorProtos { onChanged(); return this; } + /** + * optional bytes value = 1; + */ public Builder clearValue() { bitField0_ = (bitField0_ & ~0x00000001); value_ = getDefaultInstance().getValue(); onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:ByteArrayComparable) } - + static { defaultInstance = new ByteArrayComparable(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:ByteArrayComparable) } - + public interface BinaryComparatorOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .ByteArrayComparable comparable = 1; + /** + * required .ByteArrayComparable comparable = 1; + */ boolean hasComparable(); + /** + * required .ByteArrayComparable comparable = 1; + */ org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable getComparable(); + /** + * required .ByteArrayComparable comparable = 1; + */ org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder getComparableOrBuilder(); } + /** + * Protobuf type {@code BinaryComparator} + */ public static final class BinaryComparator extends com.google.protobuf.GeneratedMessage implements BinaryComparatorOrBuilder { // Use BinaryComparator.newBuilder() to construct. 
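In the new BinaryComparator constructor below, a message-typed field goes through the nested type's PARSER: case 10 calls input.readMessage(ByteArrayComparable.PARSER, extensionRegistry), and if the field was already set (the tag appears twice on the wire) the existing value is first converted via toBuilder() and merged, preserving protobuf's merge-on-repeat semantics for singular message fields. Building and parsing the nested shape:

    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator;
    import org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable;

    public final class NestedRoundTrip {
      public static void main(String[] args) throws Exception {
        ByteArrayComparable comparable = ByteArrayComparable.newBuilder()
            .setValue(ByteString.copyFrom(new byte[] { 1, 2, 3 }))
            .build();
        BinaryComparator bc = BinaryComparator.newBuilder()
            .setComparable(comparable)
            .build();
        // The nested field is decoded by ByteArrayComparable.PARSER internally.
        BinaryComparator parsed = BinaryComparator.PARSER.parseFrom(bc.toByteArray());
        System.out.println(parsed.getComparable().getValue().size());   // 3
      }
    }
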
- private BinaryComparator(Builder builder) { + private BinaryComparator(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private BinaryComparator(boolean noInit) {} - + private BinaryComparator(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final BinaryComparator defaultInstance; public static BinaryComparator getDefaultInstance() { return defaultInstance; } - + public BinaryComparator getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private BinaryComparator( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = comparable_.toBuilder(); + } + comparable_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(comparable_); + comparable_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_BinaryComparator_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_BinaryComparator_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_BinaryComparator_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator.class, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator.Builder.class); } - + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public BinaryComparator parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new BinaryComparator(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + private int bitField0_; // required 
.ByteArrayComparable comparable = 1; public static final int COMPARABLE_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable comparable_; + /** + * required .ByteArrayComparable comparable = 1; + */ public boolean hasComparable() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .ByteArrayComparable comparable = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable getComparable() { return comparable_; } + /** + * required .ByteArrayComparable comparable = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder getComparableOrBuilder() { return comparable_; } - + private void initFields() { comparable_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); } @@ -924,7 +1205,7 @@ public final class ComparatorProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasComparable()) { memoizedIsInitialized = 0; return false; @@ -932,7 +1213,7 @@ public final class ComparatorProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -941,12 +1222,12 @@ public final class ComparatorProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -956,14 +1237,14 @@ public final class ComparatorProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -973,7 +1254,7 @@ public final class ComparatorProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator other = (org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator) obj; - + boolean result = true; result = result && (hasComparable() == other.hasComparable()); if (hasComparable()) { @@ -984,9 +1265,13 @@ public final class ComparatorProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasComparable()) { @@ -994,89 +1279,79 @@ public final class ComparatorProtos { hash = (53 * hash) + getComparable().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code BinaryComparator} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements 
org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparatorOrBuilder { @@ -1084,18 +1359,21 @@ public final class ComparatorProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_BinaryComparator_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_BinaryComparator_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_BinaryComparator_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator.class, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -1107,7 +1385,7 @@ public final class ComparatorProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (comparableBuilder_ == null) { @@ -1118,20 +1396,20 @@ public final class ComparatorProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_BinaryComparator_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator build() { org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator result = buildPartial(); if (!result.isInitialized()) { @@ -1139,17 +1417,7 @@ public final class ComparatorProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator result = new org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator(this); int from_bitField0_ = bitField0_; @@ -1166,7 +1434,7 @@ public final class ComparatorProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator)other); @@ -1175,7 +1443,7 @@ public final class 
ComparatorProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator other) { if (other == org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator.getDefaultInstance()) return this; if (other.hasComparable()) { @@ -1184,7 +1452,7 @@ public final class ComparatorProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasComparable()) { @@ -1192,52 +1460,39 @@ public final class ComparatorProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.newBuilder(); - if (hasComparable()) { - subBuilder.mergeFrom(getComparable()); - } - input.readMessage(subBuilder, extensionRegistry); - setComparable(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .ByteArrayComparable comparable = 1; private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable comparable_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder> comparableBuilder_; + /** + * required .ByteArrayComparable comparable = 1; + */ public boolean hasComparable() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .ByteArrayComparable comparable = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable getComparable() { if (comparableBuilder_ == null) { return comparable_; @@ -1245,6 +1500,9 @@ public final class ComparatorProtos { return comparableBuilder_.getMessage(); } } + /** + * required .ByteArrayComparable comparable = 1; + */ public Builder setComparable(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable value) { if (comparableBuilder_ == null) { if (value == null) { @@ -1258,6 +1516,9 @@ public final class ComparatorProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .ByteArrayComparable comparable = 1; + */ 
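Because comparable is a required field, isInitialized() above fails when it is missing, and the PARSER-based parseFrom() enforces that: after a clean decode it still rejects an uninitialized message with an InvalidProtocolBufferException built from the uninitialized-message error, just as build() throws from the builder. A sketch:

    import com.google.protobuf.InvalidProtocolBufferException;
    import org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator;

    public final class RequiredFieldCheck {
      public static void main(String[] args) {
        try {
          // Zero bytes decode fine but leave required 'comparable' unset.
          BinaryComparator.PARSER.parseFrom(new byte[0]);
        } catch (InvalidProtocolBufferException e) {
          System.out.println("rejected: " + e.getMessage());
        }
      }
    }
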
public Builder setComparable( org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder builderForValue) { if (comparableBuilder_ == null) { @@ -1269,6 +1530,9 @@ public final class ComparatorProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .ByteArrayComparable comparable = 1; + */ public Builder mergeComparable(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable value) { if (comparableBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -1285,6 +1549,9 @@ public final class ComparatorProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .ByteArrayComparable comparable = 1; + */ public Builder clearComparable() { if (comparableBuilder_ == null) { comparable_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); @@ -1295,11 +1562,17 @@ public final class ComparatorProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * required .ByteArrayComparable comparable = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder getComparableBuilder() { bitField0_ |= 0x00000001; onChanged(); return getComparableFieldBuilder().getBuilder(); } + /** + * required .ByteArrayComparable comparable = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder getComparableOrBuilder() { if (comparableBuilder_ != null) { return comparableBuilder_.getMessageOrBuilder(); @@ -1307,6 +1580,9 @@ public final class ComparatorProtos { return comparable_; } } + /** + * required .ByteArrayComparable comparable = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder> getComparableFieldBuilder() { @@ -1320,68 +1596,161 @@ public final class ComparatorProtos { } return comparableBuilder_; } - + // @@protoc_insertion_point(builder_scope:BinaryComparator) } - + static { defaultInstance = new BinaryComparator(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:BinaryComparator) } - + public interface BinaryPrefixComparatorOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .ByteArrayComparable comparable = 1; + /** + * required .ByteArrayComparable comparable = 1; + */ boolean hasComparable(); + /** + * required .ByteArrayComparable comparable = 1; + */ org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable getComparable(); + /** + * required .ByteArrayComparable comparable = 1; + */ org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder getComparableOrBuilder(); } + /** + * Protobuf type {@code BinaryPrefixComparator} + */ public static final class BinaryPrefixComparator extends com.google.protobuf.GeneratedMessage implements BinaryPrefixComparatorOrBuilder { // Use BinaryPrefixComparator.newBuilder() to construct. 
- private BinaryPrefixComparator(Builder builder) { + private BinaryPrefixComparator(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private BinaryPrefixComparator(boolean noInit) {} - + private BinaryPrefixComparator(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final BinaryPrefixComparator defaultInstance; public static BinaryPrefixComparator getDefaultInstance() { return defaultInstance; } - + public BinaryPrefixComparator getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private BinaryPrefixComparator( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = comparable_.toBuilder(); + } + comparable_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(comparable_); + comparable_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_BinaryPrefixComparator_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_BinaryPrefixComparator_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_BinaryPrefixComparator_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator.class, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator.Builder.class); } - + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public BinaryPrefixComparator parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new BinaryPrefixComparator(input, extensionRegistry); + } + }; + + @java.lang.Override + public 
com.google.protobuf.Parser getParserForType() { + return PARSER; + } + private int bitField0_; // required .ByteArrayComparable comparable = 1; public static final int COMPARABLE_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable comparable_; + /** + * required .ByteArrayComparable comparable = 1; + */ public boolean hasComparable() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .ByteArrayComparable comparable = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable getComparable() { return comparable_; } + /** + * required .ByteArrayComparable comparable = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder getComparableOrBuilder() { return comparable_; } - + private void initFields() { comparable_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); } @@ -1389,7 +1758,7 @@ public final class ComparatorProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasComparable()) { memoizedIsInitialized = 0; return false; @@ -1397,7 +1766,7 @@ public final class ComparatorProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -1406,12 +1775,12 @@ public final class ComparatorProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -1421,14 +1790,14 @@ public final class ComparatorProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -1438,7 +1807,7 @@ public final class ComparatorProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator other = (org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator) obj; - + boolean result = true; result = result && (hasComparable() == other.hasComparable()); if (hasComparable()) { @@ -1449,9 +1818,13 @@ public final class ComparatorProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasComparable()) { @@ -1459,89 +1832,79 @@ public final class ComparatorProtos { hash = (53 * hash) + getComparable().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static 
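// getParserForType(), added above, lets generic utility code re-parse any
// message type without reflection. A small sketch; ReparseUtil is a
// hypothetical helper, and the unchecked cast is safe because a generated
// message's parser produces that same concrete type.
import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.Message;

public class ReparseUtil {
  @SuppressWarnings("unchecked")
  public static <T extends Message> T reparse(T msg)
      throws InvalidProtocolBufferException {
    return (T) msg.getParserForType().parseFrom(msg.toByteArray());
  }
}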
org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent 
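// The static parseDelimitedFrom(...) rewrites above delegate to
// PARSER.parseDelimitedFrom, which keeps the old contract of returning null at
// a clean end-of-stream. A sketch of writing and reading back a
// length-delimited sequence:
import java.io.*;
import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator;
import org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable;

public class DelimitedStreamExample {
  public static void main(String[] args) throws IOException {
    BinaryPrefixComparator msg = BinaryPrefixComparator.newBuilder()
        .setComparable(ByteArrayComparable.newBuilder()
            .setValue(ByteString.copyFromUtf8("prefix")))
        .build();
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    msg.writeDelimitedTo(out); // varint length prefix + payload
    msg.writeDelimitedTo(out);
    InputStream in = new ByteArrayInputStream(out.toByteArray());
    int count = 0;
    while (BinaryPrefixComparator.parseDelimitedFrom(in) != null) {
      count++; // null signals clean EOF, so the loop terminates
    }
    System.out.println(count); // 2
  }
}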
parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code BinaryPrefixComparator} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparatorOrBuilder { @@ -1549,18 +1912,21 @@ public final class ComparatorProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_BinaryPrefixComparator_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_BinaryPrefixComparator_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_BinaryPrefixComparator_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator.class, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -1572,7 +1938,7 @@ public final class ComparatorProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (comparableBuilder_ == null) { @@ -1583,20 +1949,20 @@ public final class ComparatorProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_BinaryPrefixComparator_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator build() { org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator result = buildPartial(); if (!result.isInitialized()) { @@ -1604,17 +1970,7 @@ public final class ComparatorProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator result = new org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator(this); int from_bitField0_ = bitField0_; @@ -1631,7 +1987,7 @@ public final class ComparatorProtos { onBuilt(); 
return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator)other); @@ -1640,7 +1996,7 @@ public final class ComparatorProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator other) { if (other == org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator.getDefaultInstance()) return this; if (other.hasComparable()) { @@ -1649,7 +2005,7 @@ public final class ComparatorProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasComparable()) { @@ -1657,52 +2013,39 @@ public final class ComparatorProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.newBuilder(); - if (hasComparable()) { - subBuilder.mergeFrom(getComparable()); - } - input.readMessage(subBuilder, extensionRegistry); - setComparable(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .ByteArrayComparable comparable = 1; private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable comparable_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder> comparableBuilder_; + /** + * required .ByteArrayComparable comparable = 1; + */ public boolean hasComparable() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .ByteArrayComparable comparable = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable getComparable() { if (comparableBuilder_ == null) { return comparable_; @@ -1710,6 +2053,9 @@ public final class ComparatorProtos { return comparableBuilder_.getMessage(); } } + /** + * required 
.ByteArrayComparable comparable = 1; + */ public Builder setComparable(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable value) { if (comparableBuilder_ == null) { if (value == null) { @@ -1723,6 +2069,9 @@ public final class ComparatorProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .ByteArrayComparable comparable = 1; + */ public Builder setComparable( org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder builderForValue) { if (comparableBuilder_ == null) { @@ -1734,6 +2083,9 @@ public final class ComparatorProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .ByteArrayComparable comparable = 1; + */ public Builder mergeComparable(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable value) { if (comparableBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -1750,6 +2102,9 @@ public final class ComparatorProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .ByteArrayComparable comparable = 1; + */ public Builder clearComparable() { if (comparableBuilder_ == null) { comparable_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); @@ -1760,11 +2115,17 @@ public final class ComparatorProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * required .ByteArrayComparable comparable = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder getComparableBuilder() { bitField0_ |= 0x00000001; onChanged(); return getComparableFieldBuilder().getBuilder(); } + /** + * required .ByteArrayComparable comparable = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder getComparableOrBuilder() { if (comparableBuilder_ != null) { return comparableBuilder_.getMessageOrBuilder(); @@ -1772,6 +2133,9 @@ public final class ComparatorProtos { return comparable_; } } + /** + * required .ByteArrayComparable comparable = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder> getComparableFieldBuilder() { @@ -1785,72 +2149,194 @@ public final class ComparatorProtos { } return comparableBuilder_; } - + // @@protoc_insertion_point(builder_scope:BinaryPrefixComparator) } - + static { defaultInstance = new BinaryPrefixComparator(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:BinaryPrefixComparator) } - + public interface BitComparatorOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .ByteArrayComparable comparable = 1; + /** + * required .ByteArrayComparable comparable = 1; + */ boolean hasComparable(); + /** + * required .ByteArrayComparable comparable = 1; + */ org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable getComparable(); + /** + * required .ByteArrayComparable comparable = 1; + */ org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder getComparableOrBuilder(); - + // required .BitComparator.BitwiseOp bitwiseOp = 2; + /** + * required .BitComparator.BitwiseOp bitwiseOp = 2; + */ boolean hasBitwiseOp(); + /** + * required .BitComparator.BitwiseOp bitwiseOp = 2; + */ 
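// The comparable field exposes the SingleFieldBuilder accessors documented
// above (setComparable, mergeComparable, clearComparable, getComparableBuilder).
// getComparableBuilder() lazily attaches a nested builder so the inner message
// can be edited in place. A sketch; the class name is illustrative only.
import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator;

public class NestedBuilderExample {
  public static void main(String[] args) {
    BinaryPrefixComparator.Builder b = BinaryPrefixComparator.newBuilder();
    // Creates (or returns) the nested ByteArrayComparable.Builder in place
    // and marks the field as set:
    b.getComparableBuilder().setValue(ByteString.copyFromUtf8("user|"));
    BinaryPrefixComparator msg = b.build();
    System.out.println(msg.getComparable().getValue().toStringUtf8()); // user|
  }
}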
org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp getBitwiseOp(); } + /** + * Protobuf type {@code BitComparator} + */ public static final class BitComparator extends com.google.protobuf.GeneratedMessage implements BitComparatorOrBuilder { // Use BitComparator.newBuilder() to construct. - private BitComparator(Builder builder) { + private BitComparator(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private BitComparator(boolean noInit) {} - + private BitComparator(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final BitComparator defaultInstance; public static BitComparator getDefaultInstance() { return defaultInstance; } - + public BitComparator getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private BitComparator( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = comparable_.toBuilder(); + } + comparable_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(comparable_); + comparable_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + case 16: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp value = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(2, rawValue); + } else { + bitField0_ |= 0x00000002; + bitwiseOp_ = value; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_BitComparator_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_BitComparator_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_BitComparator_fieldAccessorTable + .ensureFieldAccessorsInitialized( + 
org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.class, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public BitComparator parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new BitComparator(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + + /** + * Protobuf enum {@code BitComparator.BitwiseOp} + */ public enum BitwiseOp implements com.google.protobuf.ProtocolMessageEnum { + /** + * AND = 1; + */ AND(0, 1), + /** + * OR = 2; + */ OR(1, 2), + /** + * XOR = 3; + */ XOR(2, 3), ; - + + /** + * AND = 1; + */ public static final int AND_VALUE = 1; + /** + * OR = 2; + */ public static final int OR_VALUE = 2; + /** + * XOR = 3; + */ public static final int XOR_VALUE = 3; - - + + public final int getNumber() { return value; } - + public static BitwiseOp valueOf(int value) { switch (value) { case 1: return AND; @@ -1859,7 +2345,7 @@ public final class ComparatorProtos { default: return null; } } - + public static com.google.protobuf.Internal.EnumLiteMap internalGetValueMap() { return internalValueMap; @@ -1871,7 +2357,7 @@ public final class ComparatorProtos { return BitwiseOp.valueOf(number); } }; - + public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(index); @@ -1884,11 +2370,9 @@ public final class ComparatorProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.getDescriptor().getEnumTypes().get(0); } - - private static final BitwiseOp[] VALUES = { - AND, OR, XOR, - }; - + + private static final BitwiseOp[] VALUES = values(); + public static BitwiseOp valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { @@ -1897,42 +2381,57 @@ public final class ComparatorProtos { } return VALUES[desc.getIndex()]; } - + private final int index; private final int value; - + private BitwiseOp(int index, int value) { this.index = index; this.value = value; } - + // @@protoc_insertion_point(enum_scope:BitComparator.BitwiseOp) } - + private int bitField0_; // required .ByteArrayComparable comparable = 1; public static final int COMPARABLE_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable comparable_; + /** + * required .ByteArrayComparable comparable = 1; + */ public boolean hasComparable() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .ByteArrayComparable comparable = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable getComparable() { return comparable_; } + /** + * required .ByteArrayComparable comparable = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder getComparableOrBuilder() { return comparable_; } - + // required .BitComparator.BitwiseOp bitwiseOp = 2; public static final int BITWISEOP_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp bitwiseOp_; + /** + * required .BitComparator.BitwiseOp bitwiseOp = 2; + */ public boolean hasBitwiseOp() { return ((bitField0_ & 0x00000002) == 
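// BitwiseOp above maps proto enum numbers to Java constants; VALUES is now
// derived from values(), so the array cannot drift from the declared
// constants. valueOf(int) returns null for numbers not in the .proto
// definition, and during parsing such values are kept in the UnknownFieldSet
// rather than dropped (see the case 16 branch in the parsing constructor
// above). A quick sketch:
import org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp;

public class BitwiseOpNumbers {
  public static void main(String[] args) {
    System.out.println(BitwiseOp.valueOf(2));      // OR
    System.out.println(BitwiseOp.XOR.getNumber()); // 3
    System.out.println(BitwiseOp.valueOf(99));     // null (unknown wire value)
  }
}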
0x00000002); } + /** + * required .BitComparator.BitwiseOp bitwiseOp = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp getBitwiseOp() { return bitwiseOp_; } - + private void initFields() { comparable_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); bitwiseOp_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp.AND; @@ -1941,7 +2440,7 @@ public final class ComparatorProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasComparable()) { memoizedIsInitialized = 0; return false; @@ -1953,7 +2452,7 @@ public final class ComparatorProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -1965,12 +2464,12 @@ public final class ComparatorProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -1984,14 +2483,14 @@ public final class ComparatorProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -2001,7 +2500,7 @@ public final class ComparatorProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator other = (org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator) obj; - + boolean result = true; result = result && (hasComparable() == other.hasComparable()); if (hasComparable()) { @@ -2017,9 +2516,13 @@ public final class ComparatorProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasComparable()) { @@ -2031,89 +2534,79 @@ public final class ComparatorProtos { hash = (53 * hash) + hashEnum(getBitwiseOp()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return 
PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code BitComparator} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparatorOrBuilder { @@ -2121,18 +2614,21 @@ public final class ComparatorProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_BitComparator_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return 
org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_BitComparator_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_BitComparator_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.class, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -2144,7 +2640,7 @@ public final class ComparatorProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (comparableBuilder_ == null) { @@ -2157,20 +2653,20 @@ public final class ComparatorProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_BitComparator_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator build() { org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator result = buildPartial(); if (!result.isInitialized()) { @@ -2178,17 +2674,7 @@ public final class ComparatorProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator result = new org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator(this); int from_bitField0_ = bitField0_; @@ -2209,7 +2695,7 @@ public final class ComparatorProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator)other); @@ -2218,7 +2704,7 @@ public final class ComparatorProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator other) { if (other == org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.getDefaultInstance()) return this; if (other.hasComparable()) { @@ -2230,7 +2716,7 @@ public final class ComparatorProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean 
isInitialized() { if (!hasComparable()) { @@ -2242,63 +2728,39 @@ public final class ComparatorProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.newBuilder(); - if (hasComparable()) { - subBuilder.mergeFrom(getComparable()); - } - input.readMessage(subBuilder, extensionRegistry); - setComparable(subBuilder.buildPartial()); - break; - } - case 16: { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp value = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(2, rawValue); - } else { - bitField0_ |= 0x00000002; - bitwiseOp_ = value; - } - break; - } + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .ByteArrayComparable comparable = 1; private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable comparable_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder> comparableBuilder_; + /** + * required .ByteArrayComparable comparable = 1; + */ public boolean hasComparable() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .ByteArrayComparable comparable = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable getComparable() { if (comparableBuilder_ == null) { return comparable_; @@ -2306,6 +2768,9 @@ public final class ComparatorProtos { return comparableBuilder_.getMessage(); } } + /** + * required .ByteArrayComparable comparable = 1; + */ public Builder setComparable(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable value) { if (comparableBuilder_ == null) { if (value == null) { @@ -2319,6 +2784,9 @@ public final class ComparatorProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .ByteArrayComparable comparable = 1; + */ public Builder setComparable( 
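// The rewritten mergeFrom above preserves partially parsed data: when the
// stream fails mid-message, the InvalidProtocolBufferException carries the
// unfinished message, which is merged into the builder before rethrowing.
// A sketch that truncates a valid BitComparator to provoke this:
import java.util.Arrays;
import com.google.protobuf.ByteString;
import com.google.protobuf.CodedInputStream;
import com.google.protobuf.ExtensionRegistryLite;
import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator;
import org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable;

public class PartialMergeExample {
  public static void main(String[] args) throws Exception {
    BitComparator full = BitComparator.newBuilder()
        .setComparable(ByteArrayComparable.newBuilder()
            .setValue(ByteString.copyFromUtf8("row")))
        .setBitwiseOp(BitComparator.BitwiseOp.XOR)
        .build();
    byte[] wire = full.toByteArray();
    // Drop the final byte so field 2 (bitwiseOp) is cut off mid-read:
    byte[] truncated = Arrays.copyOf(wire, wire.length - 1);
    BitComparator.Builder builder = BitComparator.newBuilder();
    try {
      builder.mergeFrom(CodedInputStream.newInstance(truncated),
          ExtensionRegistryLite.getEmptyRegistry());
    } catch (InvalidProtocolBufferException expected) {
      // Field 1 was fully decoded before the failure and was kept:
      System.out.println(builder.hasComparable()); // true
    }
  }
}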
org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder builderForValue) { if (comparableBuilder_ == null) { @@ -2330,6 +2798,9 @@ public final class ComparatorProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .ByteArrayComparable comparable = 1; + */ public Builder mergeComparable(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable value) { if (comparableBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -2346,6 +2817,9 @@ public final class ComparatorProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .ByteArrayComparable comparable = 1; + */ public Builder clearComparable() { if (comparableBuilder_ == null) { comparable_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); @@ -2356,11 +2830,17 @@ public final class ComparatorProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * required .ByteArrayComparable comparable = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder getComparableBuilder() { bitField0_ |= 0x00000001; onChanged(); return getComparableFieldBuilder().getBuilder(); } + /** + * required .ByteArrayComparable comparable = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder getComparableOrBuilder() { if (comparableBuilder_ != null) { return comparableBuilder_.getMessageOrBuilder(); @@ -2368,6 +2848,9 @@ public final class ComparatorProtos { return comparable_; } } + /** + * required .ByteArrayComparable comparable = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder> getComparableFieldBuilder() { @@ -2381,15 +2864,24 @@ public final class ComparatorProtos { } return comparableBuilder_; } - + // required .BitComparator.BitwiseOp bitwiseOp = 2; private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp bitwiseOp_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp.AND; + /** + * required .BitComparator.BitwiseOp bitwiseOp = 2; + */ public boolean hasBitwiseOp() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required .BitComparator.BitwiseOp bitwiseOp = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp getBitwiseOp() { return bitwiseOp_; } + /** + * required .BitComparator.BitwiseOp bitwiseOp = 2; + */ public Builder setBitwiseOp(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp value) { if (value == null) { throw new NullPointerException(); @@ -2399,90 +2891,154 @@ public final class ComparatorProtos { onChanged(); return this; } + /** + * required .BitComparator.BitwiseOp bitwiseOp = 2; + */ public Builder clearBitwiseOp() { bitField0_ = (bitField0_ & ~0x00000002); bitwiseOp_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp.AND; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:BitComparator) } - + static { defaultInstance = new BitComparator(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:BitComparator) } - + public interface NullComparatorOrBuilder extends 
com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code NullComparator} + */ public static final class NullComparator extends com.google.protobuf.GeneratedMessage implements NullComparatorOrBuilder { // Use NullComparator.newBuilder() to construct. - private NullComparator(Builder builder) { + private NullComparator(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private NullComparator(boolean noInit) {} - + private NullComparator(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final NullComparator defaultInstance; public static NullComparator getDefaultInstance() { return defaultInstance; } - + public NullComparator getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private NullComparator( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_NullComparator_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_NullComparator_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_NullComparator_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator.class, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public NullComparator parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new NullComparator(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws 
java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -2492,101 +3048,95 @@ public final class ComparatorProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator other = (org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator) obj; - + boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static 
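// NullComparator above has no fields: every parsed instance compares equal,
// and an instance with no unknown fields serializes to zero bytes, so callers
// can share getDefaultInstance() instead of building fresh instances. A quick
// sketch:
import org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator;

public class EmptyMessageExample {
  public static void main(String[] args) throws Exception {
    NullComparator shared = NullComparator.getDefaultInstance();
    NullComparator parsed = NullComparator.parseFrom(new byte[0]);
    System.out.println(shared.equals(parsed));      // true
    System.out.println(shared.getSerializedSize()); // 0
  }
}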
org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code NullComparator} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparatorOrBuilder { @@ -2594,18 +3144,21 @@ public final class ComparatorProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_NullComparator_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_NullComparator_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_NullComparator_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator.class, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -2616,25 +3169,25 @@ public final class ComparatorProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_NullComparator_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator 
getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator build() { org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator result = buildPartial(); if (!result.isInitialized()) { @@ -2642,23 +3195,13 @@ public final class ComparatorProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator result = new org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator(this); onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator)other); @@ -2667,174 +3210,297 @@ public final class ComparatorProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator other) { if (other == org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - - + // @@protoc_insertion_point(builder_scope:NullComparator) } - + static { defaultInstance = new NullComparator(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:NullComparator) } - + public interface RegexStringComparatorOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required string pattern = 1; + /** + * required string pattern = 1; + */ boolean hasPattern(); - String getPattern(); - + /** + * required string pattern = 1; + */ + java.lang.String getPattern(); + /** + * required string pattern = 1; + */ + 
com.google.protobuf.ByteString + getPatternBytes(); + // required int32 patternFlags = 2; + /** + * required int32 patternFlags = 2; + */ boolean hasPatternFlags(); + /** + * required int32 patternFlags = 2; + */ int getPatternFlags(); - + // required string charset = 3; + /** + * required string charset = 3; + */ boolean hasCharset(); - String getCharset(); + /** + * required string charset = 3; + */ + java.lang.String getCharset(); + /** + * required string charset = 3; + */ + com.google.protobuf.ByteString + getCharsetBytes(); } + /** + * Protobuf type {@code RegexStringComparator} + */ public static final class RegexStringComparator extends com.google.protobuf.GeneratedMessage implements RegexStringComparatorOrBuilder { // Use RegexStringComparator.newBuilder() to construct. - private RegexStringComparator(Builder builder) { + private RegexStringComparator(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private RegexStringComparator(boolean noInit) {} - + private RegexStringComparator(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final RegexStringComparator defaultInstance; public static RegexStringComparator getDefaultInstance() { return defaultInstance; } - + public RegexStringComparator getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private RegexStringComparator( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + pattern_ = input.readBytes(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + patternFlags_ = input.readInt32(); + break; + } + case 26: { + bitField0_ |= 0x00000004; + charset_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_RegexStringComparator_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_RegexStringComparator_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_RegexStringComparator_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator.class, 
org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public RegexStringComparator parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RegexStringComparator(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required string pattern = 1; public static final int PATTERN_FIELD_NUMBER = 1; private java.lang.Object pattern_; + /** + * required string pattern = 1; + */ public boolean hasPattern() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getPattern() { + /** + * required string pattern = 1; + */ + public java.lang.String getPattern() { java.lang.Object ref = pattern_; - if (ref instanceof String) { - return (String) ref; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { pattern_ = s; } return s; } } - private com.google.protobuf.ByteString getPatternBytes() { + /** + * required string pattern = 1; + */ + public com.google.protobuf.ByteString + getPatternBytes() { java.lang.Object ref = pattern_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); pattern_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + // required int32 patternFlags = 2; public static final int PATTERNFLAGS_FIELD_NUMBER = 2; private int patternFlags_; + /** + * required int32 patternFlags = 2; + */ public boolean hasPatternFlags() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required int32 patternFlags = 2; + */ public int getPatternFlags() { return patternFlags_; } - + // required string charset = 3; public static final int CHARSET_FIELD_NUMBER = 3; private java.lang.Object charset_; + /** + * required string charset = 3; + */ public boolean hasCharset() { return ((bitField0_ & 0x00000004) == 0x00000004); } - public String getCharset() { + /** + * required string charset = 3; + */ + public java.lang.String getCharset() { java.lang.Object ref = charset_; - if (ref instanceof String) { - return (String) ref; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { charset_ = s; } return s; } } - private com.google.protobuf.ByteString getCharsetBytes() { + /** + * required string charset = 3; + */ + public com.google.protobuf.ByteString + getCharsetBytes() { java.lang.Object ref = charset_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); charset_ = b; return b; } else 
{ return (com.google.protobuf.ByteString) ref; } } - + private void initFields() { pattern_ = ""; patternFlags_ = 0; @@ -2844,7 +3510,7 @@ public final class ComparatorProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasPattern()) { memoizedIsInitialized = 0; return false; @@ -2860,7 +3526,7 @@ public final class ComparatorProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -2875,12 +3541,12 @@ public final class ComparatorProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -2898,14 +3564,14 @@ public final class ComparatorProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -2915,7 +3581,7 @@ public final class ComparatorProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator other = (org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator) obj; - + boolean result = true; result = result && (hasPattern() == other.hasPattern()); if (hasPattern()) { @@ -2936,9 +3602,13 @@ public final class ComparatorProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasPattern()) { @@ -2954,89 +3624,79 @@ public final class ComparatorProtos { hash = (53 * hash) + getCharset().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - 
.buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code RegexStringComparator} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparatorOrBuilder { @@ -3044,18 +3704,21 @@ public final class ComparatorProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_RegexStringComparator_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_RegexStringComparator_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_RegexStringComparator_fieldAccessorTable + .ensureFieldAccessorsInitialized( + 
org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator.class, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -3066,7 +3729,7 @@ public final class ComparatorProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); pattern_ = ""; @@ -3077,20 +3740,20 @@ public final class ComparatorProtos { bitField0_ = (bitField0_ & ~0x00000004); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_RegexStringComparator_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator build() { org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator result = buildPartial(); if (!result.isInitialized()) { @@ -3098,17 +3761,7 @@ public final class ComparatorProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator result = new org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator(this); int from_bitField0_ = bitField0_; @@ -3129,7 +3782,7 @@ public final class ComparatorProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator)other); @@ -3138,22 +3791,26 @@ public final class ComparatorProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator other) { if (other == org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator.getDefaultInstance()) return this; if (other.hasPattern()) { - setPattern(other.getPattern()); + bitField0_ |= 0x00000001; + pattern_ = other.pattern_; + onChanged(); } if (other.hasPatternFlags()) { setPatternFlags(other.getPatternFlags()); } if (other.hasCharset()) { - setCharset(other.getCharset()); + bitField0_ |= 0x00000004; + charset_ = other.charset_; 
+ onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasPattern()) { @@ -3169,67 +3826,69 @@ public final class ComparatorProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - pattern_ = input.readBytes(); - break; - } - case 16: { - bitField0_ |= 0x00000002; - patternFlags_ = input.readInt32(); - break; - } - case 26: { - bitField0_ |= 0x00000004; - charset_ = input.readBytes(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required string pattern = 1; private java.lang.Object pattern_ = ""; + /** + * required string pattern = 1; + */ public boolean hasPattern() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getPattern() { + /** + * required string pattern = 1; + */ + public java.lang.String getPattern() { java.lang.Object ref = pattern_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); pattern_ = s; return s; } else { - return (String) ref; + return (java.lang.String) ref; + } + } + /** + * required string pattern = 1; + */ + public com.google.protobuf.ByteString + getPatternBytes() { + java.lang.Object ref = pattern_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + pattern_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; } } - public Builder setPattern(String value) { + /** + * required string pattern = 1; + */ + public Builder setPattern( + java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ -3238,55 +3897,105 @@ public final class ComparatorProtos { onChanged(); return this; } + /** + * required string pattern = 1; + */ public Builder clearPattern() { bitField0_ = (bitField0_ & ~0x00000001); pattern_ = getDefaultInstance().getPattern(); onChanged(); return this; } - void setPattern(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; + /** + * required string pattern = 1; + */ + public Builder setPatternBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; pattern_ = value; onChanged(); + return this; } - + // required int32 patternFlags = 2; 
private int patternFlags_ ; + /** + * required int32 patternFlags = 2; + */ public boolean hasPatternFlags() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required int32 patternFlags = 2; + */ public int getPatternFlags() { return patternFlags_; } + /** + * required int32 patternFlags = 2; + */ public Builder setPatternFlags(int value) { bitField0_ |= 0x00000002; patternFlags_ = value; onChanged(); return this; } + /** + * required int32 patternFlags = 2; + */ public Builder clearPatternFlags() { bitField0_ = (bitField0_ & ~0x00000002); patternFlags_ = 0; onChanged(); return this; } - + // required string charset = 3; private java.lang.Object charset_ = ""; + /** + * required string charset = 3; + */ public boolean hasCharset() { return ((bitField0_ & 0x00000004) == 0x00000004); } - public String getCharset() { + /** + * required string charset = 3; + */ + public java.lang.String getCharset() { java.lang.Object ref = charset_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); charset_ = s; return s; } else { - return (String) ref; + return (java.lang.String) ref; + } + } + /** + * required string charset = 3; + */ + public com.google.protobuf.ByteString + getCharsetBytes() { + java.lang.Object ref = charset_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + charset_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; } } - public Builder setCharset(String value) { + /** + * required string charset = 3; + */ + public Builder setCharset( + java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ -3295,97 +4004,197 @@ public final class ComparatorProtos { onChanged(); return this; } + /** + * required string charset = 3; + */ public Builder clearCharset() { bitField0_ = (bitField0_ & ~0x00000004); charset_ = getDefaultInstance().getCharset(); onChanged(); return this; } - void setCharset(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000004; + /** + * required string charset = 3; + */ + public Builder setCharsetBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; charset_ = value; onChanged(); + return this; } - + // @@protoc_insertion_point(builder_scope:RegexStringComparator) } - + static { defaultInstance = new RegexStringComparator(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:RegexStringComparator) } - + public interface SubstringComparatorOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required string substr = 1; + /** + * required string substr = 1; + */ boolean hasSubstr(); - String getSubstr(); + /** + * required string substr = 1; + */ + java.lang.String getSubstr(); + /** + * required string substr = 1; + */ + com.google.protobuf.ByteString + getSubstrBytes(); } + /** + * Protobuf type {@code SubstringComparator} + */ public static final class SubstringComparator extends com.google.protobuf.GeneratedMessage implements SubstringComparatorOrBuilder { // Use SubstringComparator.newBuilder() to construct. 
- private SubstringComparator(Builder builder) { + private SubstringComparator(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private SubstringComparator(boolean noInit) {} - + private SubstringComparator(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final SubstringComparator defaultInstance; public static SubstringComparator getDefaultInstance() { return defaultInstance; } - + public SubstringComparator getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private SubstringComparator( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + substr_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_SubstringComparator_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_SubstringComparator_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_SubstringComparator_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator.class, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator.Builder.class); } - + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public SubstringComparator parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new SubstringComparator(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + private int bitField0_; // required string substr = 1; public static final int SUBSTR_FIELD_NUMBER = 1; private java.lang.Object substr_; + /** + * required string substr = 1; + */ public boolean hasSubstr() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getSubstr() { + /** + * required string substr = 1; + */ + public java.lang.String getSubstr() { java.lang.Object ref = substr_; - if (ref 
instanceof String) { - return (String) ref; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { substr_ = s; } return s; } } - private com.google.protobuf.ByteString getSubstrBytes() { + /** + * required string substr = 1; + */ + public com.google.protobuf.ByteString + getSubstrBytes() { java.lang.Object ref = substr_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); substr_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + private void initFields() { substr_ = ""; } @@ -3393,7 +4202,7 @@ public final class ComparatorProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasSubstr()) { memoizedIsInitialized = 0; return false; @@ -3401,7 +4210,7 @@ public final class ComparatorProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -3410,12 +4219,12 @@ public final class ComparatorProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -3425,14 +4234,14 @@ public final class ComparatorProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -3442,7 +4251,7 @@ public final class ComparatorProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator other = (org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator) obj; - + boolean result = true; result = result && (hasSubstr() == other.hasSubstr()); if (hasSubstr()) { @@ -3453,9 +4262,13 @@ public final class ComparatorProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasSubstr()) { @@ -3463,89 +4276,79 @@ public final class ComparatorProtos { hash = (53 * hash) + getSubstr().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator parseFrom( com.google.protobuf.ByteString data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code SubstringComparator} + */ public static final class Builder 
extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparatorOrBuilder { @@ -3553,18 +4356,21 @@ public final class ComparatorProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_SubstringComparator_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_SubstringComparator_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_SubstringComparator_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator.class, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -3575,27 +4381,27 @@ public final class ComparatorProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); substr_ = ""; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_SubstringComparator_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator build() { org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator result = buildPartial(); if (!result.isInitialized()) { @@ -3603,17 +4409,7 @@ public final class ComparatorProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator result = new org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator(this); int from_bitField0_ = bitField0_; @@ -3626,7 +4422,7 @@ public final class ComparatorProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator)other); @@ 
-3635,16 +4431,18 @@ public final class ComparatorProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator other) { if (other == org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator.getDefaultInstance()) return this; if (other.hasSubstr()) { - setSubstr(other.getSubstr()); + bitField0_ |= 0x00000001; + substr_ = other.substr_; + onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasSubstr()) { @@ -3652,57 +4450,69 @@ public final class ComparatorProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - substr_ = input.readBytes(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required string substr = 1; private java.lang.Object substr_ = ""; + /** + * required string substr = 1; + */ public boolean hasSubstr() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getSubstr() { + /** + * required string substr = 1; + */ + public java.lang.String getSubstr() { java.lang.Object ref = substr_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); substr_ = s; return s; } else { - return (String) ref; + return (java.lang.String) ref; } } - public Builder setSubstr(String value) { + /** + * required string substr = 1; + */ + public com.google.protobuf.ByteString + getSubstrBytes() { + java.lang.Object ref = substr_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + substr_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * required string substr = 1; + */ + public Builder setSubstr( + java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ -3711,29 +4521,40 @@ public final class ComparatorProtos { onChanged(); return this; } + /** + * required string substr = 1; + */ public Builder clearSubstr() { bitField0_ = (bitField0_ & ~0x00000001); substr_ = getDefaultInstance().getSubstr(); onChanged(); return this; } - void setSubstr(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; + /** + * required string substr = 1; + */ + public Builder 
setSubstrBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; substr_ = value; onChanged(); + return this; } - + // @@protoc_insertion_point(builder_scope:SubstringComparator) } - + static { defaultInstance = new SubstringComparator(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:SubstringComparator) } - + private static com.google.protobuf.Descriptors.Descriptor internal_static_Comparator_descriptor; private static @@ -3774,7 +4595,7 @@ public final class ComparatorProtos { private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_SubstringComparator_fieldAccessorTable; - + public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; @@ -3809,65 +4630,49 @@ public final class ComparatorProtos { internal_static_Comparator_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_Comparator_descriptor, - new java.lang.String[] { "Name", "SerializedComparator", }, - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.class, - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder.class); + new java.lang.String[] { "Name", "SerializedComparator", }); internal_static_ByteArrayComparable_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_ByteArrayComparable_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ByteArrayComparable_descriptor, - new java.lang.String[] { "Value", }, - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.class, - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder.class); + new java.lang.String[] { "Value", }); internal_static_BinaryComparator_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_BinaryComparator_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_BinaryComparator_descriptor, - new java.lang.String[] { "Comparable", }, - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator.class, - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator.Builder.class); + new java.lang.String[] { "Comparable", }); internal_static_BinaryPrefixComparator_descriptor = getDescriptor().getMessageTypes().get(3); internal_static_BinaryPrefixComparator_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_BinaryPrefixComparator_descriptor, - new java.lang.String[] { "Comparable", }, - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator.class, - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator.Builder.class); + new java.lang.String[] { "Comparable", }); internal_static_BitComparator_descriptor = getDescriptor().getMessageTypes().get(4); internal_static_BitComparator_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_BitComparator_descriptor, - new java.lang.String[] { "Comparable", "BitwiseOp", }, - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.class, - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.Builder.class); + new java.lang.String[] { "Comparable", "BitwiseOp", }); internal_static_NullComparator_descriptor = getDescriptor().getMessageTypes().get(5); 
internal_static_NullComparator_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_NullComparator_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator.class, - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator.Builder.class); + new java.lang.String[] { }); internal_static_RegexStringComparator_descriptor = getDescriptor().getMessageTypes().get(6); internal_static_RegexStringComparator_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_RegexStringComparator_descriptor, - new java.lang.String[] { "Pattern", "PatternFlags", "Charset", }, - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator.class, - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator.Builder.class); + new java.lang.String[] { "Pattern", "PatternFlags", "Charset", }); internal_static_SubstringComparator_descriptor = getDescriptor().getMessageTypes().get(7); internal_static_SubstringComparator_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_SubstringComparator_descriptor, - new java.lang.String[] { "Substr", }, - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator.class, - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator.Builder.class); + new java.lang.String[] { "Substr", }); return null; } }; @@ -3876,6 +4681,6 @@ public final class ComparatorProtos { new com.google.protobuf.Descriptors.FileDescriptor[] { }, assigner); } - + // @@protoc_insertion_point(outer_class_scope) } diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ErrorHandlingProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ErrorHandlingProtos.java index 067321f..782f5b4 100644 --- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ErrorHandlingProtos.java +++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ErrorHandlingProtos.java @@ -10,158 +10,324 @@ public final class ErrorHandlingProtos { } public interface StackTraceElementMessageOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // optional string declaringClass = 1; + /** + * optional string declaringClass = 1; + */ boolean hasDeclaringClass(); - String getDeclaringClass(); - + /** + * optional string declaringClass = 1; + */ + java.lang.String getDeclaringClass(); + /** + * optional string declaringClass = 1; + */ + com.google.protobuf.ByteString + getDeclaringClassBytes(); + // optional string methodName = 2; + /** + * optional string methodName = 2; + */ boolean hasMethodName(); - String getMethodName(); - + /** + * optional string methodName = 2; + */ + java.lang.String getMethodName(); + /** + * optional string methodName = 2; + */ + com.google.protobuf.ByteString + getMethodNameBytes(); + // optional string fileName = 3; + /** + * optional string fileName = 3; + */ boolean hasFileName(); - String getFileName(); - + /** + * optional string fileName = 3; + */ + java.lang.String getFileName(); + /** + * optional string fileName = 3; + */ + com.google.protobuf.ByteString + getFileNameBytes(); + // optional int32 lineNumber = 4; + /** + * optional int32 lineNumber = 4; + */ boolean hasLineNumber(); + /** + * optional int32 lineNumber = 4; + */ int getLineNumber(); } + /** + * Protobuf type {@code StackTraceElementMessage} + * + *
+   * <pre>
+   **
+   * Protobuf version of a java.lang.StackTraceElement
+   * so we can serialize exceptions.
+   * </pre>
+ */ public static final class StackTraceElementMessage extends com.google.protobuf.GeneratedMessage implements StackTraceElementMessageOrBuilder { // Use StackTraceElementMessage.newBuilder() to construct. - private StackTraceElementMessage(Builder builder) { + private StackTraceElementMessage(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private StackTraceElementMessage(boolean noInit) {} - + private StackTraceElementMessage(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final StackTraceElementMessage defaultInstance; public static StackTraceElementMessage getDefaultInstance() { return defaultInstance; } - + public StackTraceElementMessage getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private StackTraceElementMessage( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + declaringClass_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + methodName_ = input.readBytes(); + break; + } + case 26: { + bitField0_ |= 0x00000004; + fileName_ = input.readBytes(); + break; + } + case 32: { + bitField0_ |= 0x00000008; + lineNumber_ = input.readInt32(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_StackTraceElementMessage_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_StackTraceElementMessage_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_StackTraceElementMessage_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage.class, org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public StackTraceElementMessage parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new 
StackTraceElementMessage(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // optional string declaringClass = 1; public static final int DECLARINGCLASS_FIELD_NUMBER = 1; private java.lang.Object declaringClass_; + /** + * optional string declaringClass = 1; + */ public boolean hasDeclaringClass() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getDeclaringClass() { + /** + * optional string declaringClass = 1; + */ + public java.lang.String getDeclaringClass() { java.lang.Object ref = declaringClass_; - if (ref instanceof String) { - return (String) ref; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { declaringClass_ = s; } return s; } } - private com.google.protobuf.ByteString getDeclaringClassBytes() { + /** + * optional string declaringClass = 1; + */ + public com.google.protobuf.ByteString + getDeclaringClassBytes() { java.lang.Object ref = declaringClass_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); declaringClass_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + // optional string methodName = 2; public static final int METHODNAME_FIELD_NUMBER = 2; private java.lang.Object methodName_; + /** + * optional string methodName = 2; + */ public boolean hasMethodName() { return ((bitField0_ & 0x00000002) == 0x00000002); } - public String getMethodName() { + /** + * optional string methodName = 2; + */ + public java.lang.String getMethodName() { java.lang.Object ref = methodName_; - if (ref instanceof String) { - return (String) ref; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { methodName_ = s; } return s; } } - private com.google.protobuf.ByteString getMethodNameBytes() { + /** + * optional string methodName = 2; + */ + public com.google.protobuf.ByteString + getMethodNameBytes() { java.lang.Object ref = methodName_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); methodName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + // optional string fileName = 3; public static final int FILENAME_FIELD_NUMBER = 3; private java.lang.Object fileName_; + /** + * optional string fileName = 3; + */ public boolean hasFileName() { return ((bitField0_ & 0x00000004) == 0x00000004); } - public String getFileName() { + /** + * optional string fileName = 3; + */ + public java.lang.String getFileName() { java.lang.Object ref = fileName_; - if (ref instanceof String) { - return (String) ref; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = 
(com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { fileName_ = s; } return s; } } - private com.google.protobuf.ByteString getFileNameBytes() { + /** + * optional string fileName = 3; + */ + public com.google.protobuf.ByteString + getFileNameBytes() { java.lang.Object ref = fileName_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); fileName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + // optional int32 lineNumber = 4; public static final int LINENUMBER_FIELD_NUMBER = 4; private int lineNumber_; + /** + * optional int32 lineNumber = 4; + */ public boolean hasLineNumber() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * optional int32 lineNumber = 4; + */ public int getLineNumber() { return lineNumber_; } - + private void initFields() { declaringClass_ = ""; methodName_ = ""; @@ -172,11 +338,11 @@ public final class ErrorHandlingProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -194,12 +360,12 @@ public final class ErrorHandlingProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -221,14 +387,14 @@ public final class ErrorHandlingProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -238,7 +404,7 @@ public final class ErrorHandlingProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage other = (org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage) obj; - + boolean result = true; result = result && (hasDeclaringClass() == other.hasDeclaringClass()); if (hasDeclaringClass()) { @@ -264,9 +430,13 @@ public final class ErrorHandlingProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasDeclaringClass()) { @@ -286,89 +456,85 @@ public final class ErrorHandlingProtos { hash = (53 * hash) + getLineNumber(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } 
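For context on the pattern in these hunks: every static parseFrom overload moves from the protobuf 2.4 idiom (allocate a Builder, mergeFrom, then buildParsed) to the protobuf 2.5 PARSER singleton, which parses without a per-call Builder. A minimal sketch of the two paths side by side, assuming the regenerated ErrorHandlingProtos from this patch is on the classpath (the demo class itself is hypothetical; PARSER, newBuilder, and the field setters are from the patch):

    import org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage;

    public class ParserPathDemo {
      public static void main(String[] args) throws Exception {
        // Build and serialize a sample stack-trace element.
        StackTraceElementMessage msg = StackTraceElementMessage.newBuilder()
            .setDeclaringClass("org.example.Demo")
            .setMethodName("run")
            .setFileName("Demo.java")
            .setLineNumber(42)
            .build();
        byte[] wire = msg.toByteArray();

        // New protobuf 2.5 path: a shared, stateless parser object.
        StackTraceElementMessage viaParser = StackTraceElementMessage.PARSER.parseFrom(wire);

        // Old protobuf 2.4 path being removed: one Builder allocated per parse.
        StackTraceElementMessage viaBuilder =
            StackTraceElementMessage.newBuilder().mergeFrom(wire).build();

        System.out.println(viaParser.equals(viaBuilder)); // prints: true
      }
    }
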
public static org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder 
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code StackTraceElementMessage} + * + *
+     * <pre>
+     **
+     * Protobuf version of a java.lang.StackTraceElement
+     * so we can serialize exceptions.
+     * </pre>
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessageOrBuilder { @@ -376,18 +542,21 @@ public final class ErrorHandlingProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_StackTraceElementMessage_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_StackTraceElementMessage_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_StackTraceElementMessage_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage.class, org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -398,7 +567,7 @@ public final class ErrorHandlingProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); declaringClass_ = ""; @@ -411,20 +580,20 @@ public final class ErrorHandlingProtos { bitField0_ = (bitField0_ & ~0x00000008); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_StackTraceElementMessage_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage build() { org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage result = buildPartial(); if (!result.isInitialized()) { @@ -432,17 +601,7 @@ public final class ErrorHandlingProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage result = new org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage(this); int from_bitField0_ = bitField0_; @@ -467,7 +626,7 @@ public final class ErrorHandlingProtos { onBuilt(); return result; } - + public Builder 
mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage)other); @@ -476,17 +635,23 @@ public final class ErrorHandlingProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage other) { if (other == org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage.getDefaultInstance()) return this; if (other.hasDeclaringClass()) { - setDeclaringClass(other.getDeclaringClass()); + bitField0_ |= 0x00000001; + declaringClass_ = other.declaringClass_; + onChanged(); } if (other.hasMethodName()) { - setMethodName(other.getMethodName()); + bitField0_ |= 0x00000002; + methodName_ = other.methodName_; + onChanged(); } if (other.hasFileName()) { - setFileName(other.getFileName()); + bitField0_ |= 0x00000004; + fileName_ = other.fileName_; + onChanged(); } if (other.hasLineNumber()) { setLineNumber(other.getLineNumber()); @@ -494,76 +659,73 @@ public final class ErrorHandlingProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - declaringClass_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - methodName_ = input.readBytes(); - break; - } - case 26: { - bitField0_ |= 0x00000004; - fileName_ = input.readBytes(); - break; - } - case 32: { - bitField0_ |= 0x00000008; - lineNumber_ = input.readInt32(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // optional string declaringClass = 1; private java.lang.Object declaringClass_ = ""; + /** + * optional string declaringClass = 1; + */ public boolean hasDeclaringClass() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getDeclaringClass() { + /** + * optional string declaringClass = 1; + */ + public java.lang.String getDeclaringClass() { java.lang.Object ref = declaringClass_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); declaringClass_ = s; return s; } else { - return (String) ref; + return 
(java.lang.String) ref; + } + } + /** + * optional string declaringClass = 1; + */ + public com.google.protobuf.ByteString + getDeclaringClassBytes() { + java.lang.Object ref = declaringClass_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + declaringClass_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; } } - public Builder setDeclaringClass(String value) { + /** + * optional string declaringClass = 1; + */ + public Builder setDeclaringClass( + java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ -572,34 +734,72 @@ public final class ErrorHandlingProtos { onChanged(); return this; } + /** + * optional string declaringClass = 1; + */ public Builder clearDeclaringClass() { bitField0_ = (bitField0_ & ~0x00000001); declaringClass_ = getDefaultInstance().getDeclaringClass(); onChanged(); return this; } - void setDeclaringClass(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; + /** + * optional string declaringClass = 1; + */ + public Builder setDeclaringClassBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; declaringClass_ = value; onChanged(); + return this; } - + // optional string methodName = 2; private java.lang.Object methodName_ = ""; + /** + * optional string methodName = 2; + */ public boolean hasMethodName() { return ((bitField0_ & 0x00000002) == 0x00000002); } - public String getMethodName() { + /** + * optional string methodName = 2; + */ + public java.lang.String getMethodName() { java.lang.Object ref = methodName_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); methodName_ = s; return s; } else { - return (String) ref; + return (java.lang.String) ref; } } - public Builder setMethodName(String value) { + /** + * optional string methodName = 2; + */ + public com.google.protobuf.ByteString + getMethodNameBytes() { + java.lang.Object ref = methodName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + methodName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string methodName = 2; + */ + public Builder setMethodName( + java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ -608,34 +808,72 @@ public final class ErrorHandlingProtos { onChanged(); return this; } + /** + * optional string methodName = 2; + */ public Builder clearMethodName() { bitField0_ = (bitField0_ & ~0x00000002); methodName_ = getDefaultInstance().getMethodName(); onChanged(); return this; } - void setMethodName(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000002; + /** + * optional string methodName = 2; + */ + public Builder setMethodNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; methodName_ = value; onChanged(); + return this; } - + // optional string fileName = 3; private java.lang.Object fileName_ = ""; + /** + * optional string fileName = 3; + */ public boolean hasFileName() { return ((bitField0_ & 0x00000004) == 0x00000004); } - public String getFileName() { + /** + * optional string 
fileName = 3; + */ + public java.lang.String getFileName() { java.lang.Object ref = fileName_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); fileName_ = s; return s; } else { - return (String) ref; + return (java.lang.String) ref; + } + } + /** + * optional string fileName = 3; + */ + public com.google.protobuf.ByteString + getFileNameBytes() { + java.lang.Object ref = fileName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + fileName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; } } - public Builder setFileName(String value) { + /** + * optional string fileName = 3; + */ + public Builder setFileName( + java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ -644,199 +882,404 @@ public final class ErrorHandlingProtos { onChanged(); return this; } + /** + * optional string fileName = 3; + */ public Builder clearFileName() { bitField0_ = (bitField0_ & ~0x00000004); fileName_ = getDefaultInstance().getFileName(); onChanged(); return this; } - void setFileName(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000004; + /** + * optional string fileName = 3; + */ + public Builder setFileNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; fileName_ = value; onChanged(); + return this; } - + // optional int32 lineNumber = 4; private int lineNumber_ ; + /** + * optional int32 lineNumber = 4; + */ public boolean hasLineNumber() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * optional int32 lineNumber = 4; + */ public int getLineNumber() { return lineNumber_; } + /** + * optional int32 lineNumber = 4; + */ public Builder setLineNumber(int value) { bitField0_ |= 0x00000008; lineNumber_ = value; onChanged(); return this; } + /** + * optional int32 lineNumber = 4; + */ public Builder clearLineNumber() { bitField0_ = (bitField0_ & ~0x00000008); lineNumber_ = 0; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:StackTraceElementMessage) } - + static { defaultInstance = new StackTraceElementMessage(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:StackTraceElementMessage) } - + public interface GenericExceptionMessageOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // optional string className = 1; + /** + * optional string className = 1; + */ boolean hasClassName(); - String getClassName(); - + /** + * optional string className = 1; + */ + java.lang.String getClassName(); + /** + * optional string className = 1; + */ + com.google.protobuf.ByteString + getClassNameBytes(); + // optional string message = 2; + /** + * optional string message = 2; + */ boolean hasMessage(); - String getMessage(); - + /** + * optional string message = 2; + */ + java.lang.String getMessage(); + /** + * optional string message = 2; + */ + com.google.protobuf.ByteString + getMessageBytes(); + // optional bytes errorInfo = 3; + /** + * optional bytes errorInfo = 3; + */ boolean hasErrorInfo(); + /** + * optional bytes errorInfo = 3; + */ com.google.protobuf.ByteString getErrorInfo(); - + // repeated .StackTraceElementMessage trace = 4; + /** + * repeated .StackTraceElementMessage trace = 4; + */ java.util.List 
getTraceList(); + /** + * repeated .StackTraceElementMessage trace = 4; + */ org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage getTrace(int index); + /** + * repeated .StackTraceElementMessage trace = 4; + */ int getTraceCount(); + /** + * repeated .StackTraceElementMessage trace = 4; + */ java.util.List getTraceOrBuilderList(); + /** + * repeated .StackTraceElementMessage trace = 4; + */ org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessageOrBuilder getTraceOrBuilder( int index); } + /** + * Protobuf type {@code GenericExceptionMessage} + * + *
+   * <pre>
+   **
+   * Cause of a remote failure for a generic exception. Contains
+   * all the information for a generic exception as well as
+   * optional info about the error for generic info passing
+   * (which should be another protobuffed class).
+   * </pre>
+ */ public static final class GenericExceptionMessage extends com.google.protobuf.GeneratedMessage implements GenericExceptionMessageOrBuilder { // Use GenericExceptionMessage.newBuilder() to construct. - private GenericExceptionMessage(Builder builder) { + private GenericExceptionMessage(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private GenericExceptionMessage(boolean noInit) {} - + private GenericExceptionMessage(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final GenericExceptionMessage defaultInstance; public static GenericExceptionMessage getDefaultInstance() { return defaultInstance; } - + public GenericExceptionMessage getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private GenericExceptionMessage( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + className_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + message_ = input.readBytes(); + break; + } + case 26: { + bitField0_ |= 0x00000004; + errorInfo_ = input.readBytes(); + break; + } + case 34: { + if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) { + trace_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000008; + } + trace_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage.PARSER, extensionRegistry)); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) { + trace_ = java.util.Collections.unmodifiableList(trace_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_GenericExceptionMessage_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_GenericExceptionMessage_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_GenericExceptionMessage_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage.class, org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage.Builder.class); + } + + public static 
com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public GenericExceptionMessage parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GenericExceptionMessage(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // optional string className = 1; public static final int CLASSNAME_FIELD_NUMBER = 1; private java.lang.Object className_; + /** + * optional string className = 1; + */ public boolean hasClassName() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getClassName() { + /** + * optional string className = 1; + */ + public java.lang.String getClassName() { java.lang.Object ref = className_; - if (ref instanceof String) { - return (String) ref; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { className_ = s; } return s; } } - private com.google.protobuf.ByteString getClassNameBytes() { + /** + * optional string className = 1; + */ + public com.google.protobuf.ByteString + getClassNameBytes() { java.lang.Object ref = className_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); className_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + // optional string message = 2; public static final int MESSAGE_FIELD_NUMBER = 2; private java.lang.Object message_; + /** + * optional string message = 2; + */ public boolean hasMessage() { return ((bitField0_ & 0x00000002) == 0x00000002); } - public String getMessage() { + /** + * optional string message = 2; + */ + public java.lang.String getMessage() { java.lang.Object ref = message_; - if (ref instanceof String) { - return (String) ref; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { message_ = s; } return s; } } - private com.google.protobuf.ByteString getMessageBytes() { + /** + * optional string message = 2; + */ + public com.google.protobuf.ByteString + getMessageBytes() { java.lang.Object ref = message_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); message_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + // optional bytes errorInfo = 3; public static final int ERRORINFO_FIELD_NUMBER = 3; private com.google.protobuf.ByteString errorInfo_; + /** + * optional bytes errorInfo = 3; + */ public boolean hasErrorInfo() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional bytes errorInfo = 3; + */ public com.google.protobuf.ByteString getErrorInfo() { return errorInfo_; } - + 
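The repeated trace field handled next pairs GenericExceptionMessage with the StackTraceElementMessage defined earlier, so a whole Java stack trace can ride along with the exception. A sketch of filling that field from a live Throwable, using only builder methods that appear in this diff (the helper name toProtobuf is an assumption for illustration, not an HBase API):

    import org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage;
    import org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage;

    public class ThrowableConverter {
      // Serialize a Throwable into the message layout shown in this diff.
      public static GenericExceptionMessage toProtobuf(Throwable t) {
        GenericExceptionMessage.Builder builder = GenericExceptionMessage.newBuilder()
            .setClassName(t.getClass().getName());
        if (t.getMessage() != null) {
          builder.setMessage(t.getMessage()); // optional string message = 2
        }
        for (StackTraceElement frame : t.getStackTrace()) {
          StackTraceElementMessage.Builder elem = StackTraceElementMessage.newBuilder()
              .setDeclaringClass(frame.getClassName())
              .setMethodName(frame.getMethodName())
              .setLineNumber(frame.getLineNumber());
          if (frame.getFileName() != null) { // fileName is optional and may be absent
            elem.setFileName(frame.getFileName());
          }
          builder.addTrace(elem); // repeated .StackTraceElementMessage trace = 4
        }
        return builder.build();
      }
    }
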
// repeated .StackTraceElementMessage trace = 4; public static final int TRACE_FIELD_NUMBER = 4; private java.util.List trace_; + /** + * repeated .StackTraceElementMessage trace = 4; + */ public java.util.List getTraceList() { return trace_; } + /** + * repeated .StackTraceElementMessage trace = 4; + */ public java.util.List getTraceOrBuilderList() { return trace_; } + /** + * repeated .StackTraceElementMessage trace = 4; + */ public int getTraceCount() { return trace_.size(); } + /** + * repeated .StackTraceElementMessage trace = 4; + */ public org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage getTrace(int index) { return trace_.get(index); } + /** + * repeated .StackTraceElementMessage trace = 4; + */ public org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessageOrBuilder getTraceOrBuilder( int index) { return trace_.get(index); } - + private void initFields() { className_ = ""; message_ = ""; @@ -847,11 +1290,11 @@ public final class ErrorHandlingProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -869,12 +1312,12 @@ public final class ErrorHandlingProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -896,14 +1339,14 @@ public final class ErrorHandlingProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -913,7 +1356,7 @@ public final class ErrorHandlingProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage other = (org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage) obj; - + boolean result = true; result = result && (hasClassName() == other.hasClassName()); if (hasClassName()) { @@ -936,9 +1379,13 @@ public final class ErrorHandlingProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasClassName()) { @@ -958,89 +1405,87 @@ public final class ErrorHandlingProtos { hash = (53 * hash) + getTraceList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code GenericExceptionMessage} + * + *
+     * <pre>
+     **
+     * Cause of a remote failure for a generic exception. Contains
+     * all the information for a generic exception as well as
+     * optional info about the error for generic info passing
+     * (which should be another protobuffed class).
+     * </pre>
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessageOrBuilder { @@ -1048,18 +1493,21 @@ public final class ErrorHandlingProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_GenericExceptionMessage_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_GenericExceptionMessage_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_GenericExceptionMessage_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage.class, org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -1071,7 +1519,7 @@ public final class ErrorHandlingProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); className_ = ""; @@ -1088,20 +1536,20 @@ public final class ErrorHandlingProtos { } return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_GenericExceptionMessage_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage build() { org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage result = buildPartial(); if (!result.isInitialized()) { @@ -1109,17 +1557,7 @@ public final class ErrorHandlingProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage result = new org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage(this); int from_bitField0_ = bitField0_; @@ -1149,7 +1587,7 @@ public final class ErrorHandlingProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other 
instanceof org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage)other); @@ -1158,14 +1596,18 @@ public final class ErrorHandlingProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage other) { if (other == org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage.getDefaultInstance()) return this; if (other.hasClassName()) { - setClassName(other.getClassName()); + bitField0_ |= 0x00000001; + className_ = other.className_; + onChanged(); } if (other.hasMessage()) { - setMessage(other.getMessage()); + bitField0_ |= 0x00000002; + message_ = other.message_; + onChanged(); } if (other.hasErrorInfo()) { setErrorInfo(other.getErrorInfo()); @@ -1199,77 +1641,73 @@ public final class ErrorHandlingProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - className_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - message_ = input.readBytes(); - break; - } - case 26: { - bitField0_ |= 0x00000004; - errorInfo_ = input.readBytes(); - break; - } - case 34: { - org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addTrace(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // optional string className = 1; private java.lang.Object className_ = ""; + /** + * optional string className = 1; + */ public boolean hasClassName() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getClassName() { + /** + * optional string className = 1; + */ + public java.lang.String getClassName() { java.lang.Object ref = className_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); className_ = s; return s; } else { - return (String) ref; + return (java.lang.String) ref; } } - public Builder 
setClassName(String value) { + /** + * optional string className = 1; + */ + public com.google.protobuf.ByteString + getClassNameBytes() { + java.lang.Object ref = className_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + className_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string className = 1; + */ + public Builder setClassName( + java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ -1278,34 +1716,72 @@ public final class ErrorHandlingProtos { onChanged(); return this; } + /** + * optional string className = 1; + */ public Builder clearClassName() { bitField0_ = (bitField0_ & ~0x00000001); className_ = getDefaultInstance().getClassName(); onChanged(); return this; } - void setClassName(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; + /** + * optional string className = 1; + */ + public Builder setClassNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; className_ = value; onChanged(); + return this; } - + // optional string message = 2; private java.lang.Object message_ = ""; + /** + * optional string message = 2; + */ public boolean hasMessage() { return ((bitField0_ & 0x00000002) == 0x00000002); } - public String getMessage() { + /** + * optional string message = 2; + */ + public java.lang.String getMessage() { java.lang.Object ref = message_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); message_ = s; return s; } else { - return (String) ref; + return (java.lang.String) ref; } } - public Builder setMessage(String value) { + /** + * optional string message = 2; + */ + public com.google.protobuf.ByteString + getMessageBytes() { + java.lang.Object ref = message_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + message_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string message = 2; + */ + public Builder setMessage( + java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ -1314,26 +1790,46 @@ public final class ErrorHandlingProtos { onChanged(); return this; } + /** + * optional string message = 2; + */ public Builder clearMessage() { bitField0_ = (bitField0_ & ~0x00000002); message_ = getDefaultInstance().getMessage(); onChanged(); return this; } - void setMessage(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000002; + /** + * optional string message = 2; + */ + public Builder setMessageBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; message_ = value; onChanged(); + return this; } - + // optional bytes errorInfo = 3; private com.google.protobuf.ByteString errorInfo_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes errorInfo = 3; + */ public boolean hasErrorInfo() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional bytes errorInfo = 3; + */ public com.google.protobuf.ByteString getErrorInfo() { return errorInfo_; } + /** + * optional bytes errorInfo = 3; + */ public Builder 
setErrorInfo(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -1343,13 +1839,16 @@ public final class ErrorHandlingProtos { onChanged(); return this; } + /** + * optional bytes errorInfo = 3; + */ public Builder clearErrorInfo() { bitField0_ = (bitField0_ & ~0x00000004); errorInfo_ = getDefaultInstance().getErrorInfo(); onChanged(); return this; } - + // repeated .StackTraceElementMessage trace = 4; private java.util.List trace_ = java.util.Collections.emptyList(); @@ -1359,10 +1858,13 @@ public final class ErrorHandlingProtos { bitField0_ |= 0x00000008; } } - + private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage, org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage.Builder, org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessageOrBuilder> traceBuilder_; - + + /** + * repeated .StackTraceElementMessage trace = 4; + */ public java.util.List getTraceList() { if (traceBuilder_ == null) { return java.util.Collections.unmodifiableList(trace_); @@ -1370,6 +1872,9 @@ public final class ErrorHandlingProtos { return traceBuilder_.getMessageList(); } } + /** + * repeated .StackTraceElementMessage trace = 4; + */ public int getTraceCount() { if (traceBuilder_ == null) { return trace_.size(); @@ -1377,6 +1882,9 @@ public final class ErrorHandlingProtos { return traceBuilder_.getCount(); } } + /** + * repeated .StackTraceElementMessage trace = 4; + */ public org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage getTrace(int index) { if (traceBuilder_ == null) { return trace_.get(index); @@ -1384,6 +1892,9 @@ public final class ErrorHandlingProtos { return traceBuilder_.getMessage(index); } } + /** + * repeated .StackTraceElementMessage trace = 4; + */ public Builder setTrace( int index, org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage value) { if (traceBuilder_ == null) { @@ -1398,6 +1909,9 @@ public final class ErrorHandlingProtos { } return this; } + /** + * repeated .StackTraceElementMessage trace = 4; + */ public Builder setTrace( int index, org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage.Builder builderForValue) { if (traceBuilder_ == null) { @@ -1409,6 +1923,9 @@ public final class ErrorHandlingProtos { } return this; } + /** + * repeated .StackTraceElementMessage trace = 4; + */ public Builder addTrace(org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage value) { if (traceBuilder_ == null) { if (value == null) { @@ -1422,6 +1939,9 @@ public final class ErrorHandlingProtos { } return this; } + /** + * repeated .StackTraceElementMessage trace = 4; + */ public Builder addTrace( int index, org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage value) { if (traceBuilder_ == null) { @@ -1436,6 +1956,9 @@ public final class ErrorHandlingProtos { } return this; } + /** + * repeated .StackTraceElementMessage trace = 4; + */ public Builder addTrace( org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage.Builder builderForValue) { if (traceBuilder_ == null) { @@ -1447,6 +1970,9 @@ public final class ErrorHandlingProtos { } return this; } + /** + * repeated .StackTraceElementMessage trace = 4; + */ public Builder addTrace( int index, 
org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage.Builder builderForValue) { if (traceBuilder_ == null) { @@ -1458,6 +1984,9 @@ public final class ErrorHandlingProtos { } return this; } + /** + * repeated .StackTraceElementMessage trace = 4; + */ public Builder addAllTrace( java.lang.Iterable values) { if (traceBuilder_ == null) { @@ -1469,6 +1998,9 @@ public final class ErrorHandlingProtos { } return this; } + /** + * repeated .StackTraceElementMessage trace = 4; + */ public Builder clearTrace() { if (traceBuilder_ == null) { trace_ = java.util.Collections.emptyList(); @@ -1479,6 +2011,9 @@ public final class ErrorHandlingProtos { } return this; } + /** + * repeated .StackTraceElementMessage trace = 4; + */ public Builder removeTrace(int index) { if (traceBuilder_ == null) { ensureTraceIsMutable(); @@ -1489,10 +2024,16 @@ public final class ErrorHandlingProtos { } return this; } + /** + * repeated .StackTraceElementMessage trace = 4; + */ public org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage.Builder getTraceBuilder( int index) { return getTraceFieldBuilder().getBuilder(index); } + /** + * repeated .StackTraceElementMessage trace = 4; + */ public org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessageOrBuilder getTraceOrBuilder( int index) { if (traceBuilder_ == null) { @@ -1500,6 +2041,9 @@ public final class ErrorHandlingProtos { return traceBuilder_.getMessageOrBuilder(index); } } + /** + * repeated .StackTraceElementMessage trace = 4; + */ public java.util.List getTraceOrBuilderList() { if (traceBuilder_ != null) { @@ -1508,15 +2052,24 @@ public final class ErrorHandlingProtos { return java.util.Collections.unmodifiableList(trace_); } } + /** + * repeated .StackTraceElementMessage trace = 4; + */ public org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage.Builder addTraceBuilder() { return getTraceFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage.getDefaultInstance()); } + /** + * repeated .StackTraceElementMessage trace = 4; + */ public org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage.Builder addTraceBuilder( int index) { return getTraceFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage.getDefaultInstance()); } + /** + * repeated .StackTraceElementMessage trace = 4; + */ public java.util.List getTraceBuilderList() { return getTraceFieldBuilder().getBuilderList(); @@ -1535,104 +2088,230 @@ public final class ErrorHandlingProtos { } return traceBuilder_; } - + // @@protoc_insertion_point(builder_scope:GenericExceptionMessage) } - + static { defaultInstance = new GenericExceptionMessage(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:GenericExceptionMessage) } - + public interface ForeignExceptionMessageOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // optional string source = 1; + /** + * optional string source = 1; + */ boolean hasSource(); - String getSource(); - + /** + * optional string source = 1; + */ + java.lang.String getSource(); + /** + * optional string source = 1; + */ + com.google.protobuf.ByteString + getSourceBytes(); + // optional .GenericExceptionMessage genericException = 2; + /** + * optional .GenericExceptionMessage genericException = 2; + */ boolean hasGenericException(); + /** + * optional 
.GenericExceptionMessage genericException = 2; + */ org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage getGenericException(); + /** + * optional .GenericExceptionMessage genericException = 2; + */ org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessageOrBuilder getGenericExceptionOrBuilder(); } + /** + * Protobuf type {@code ForeignExceptionMessage} + * + *
+   * <pre>
+   **
+   * Exception sent across the wire when a remote task needs
+   * to notify other tasks that it failed and why
+   * </pre>
+ */ public static final class ForeignExceptionMessage extends com.google.protobuf.GeneratedMessage implements ForeignExceptionMessageOrBuilder { // Use ForeignExceptionMessage.newBuilder() to construct. - private ForeignExceptionMessage(Builder builder) { + private ForeignExceptionMessage(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private ForeignExceptionMessage(boolean noInit) {} - + private ForeignExceptionMessage(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final ForeignExceptionMessage defaultInstance; public static ForeignExceptionMessage getDefaultInstance() { return defaultInstance; } - + public ForeignExceptionMessage getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ForeignExceptionMessage( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + source_ = input.readBytes(); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage.Builder subBuilder = null; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + subBuilder = genericException_.toBuilder(); + } + genericException_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(genericException_); + genericException_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000002; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_ForeignExceptionMessage_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_ForeignExceptionMessage_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_ForeignExceptionMessage_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.class, org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.Builder.class); } - + + public static com.google.protobuf.Parser PARSER = + new 
com.google.protobuf.AbstractParser() { + public ForeignExceptionMessage parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ForeignExceptionMessage(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + private int bitField0_; // optional string source = 1; public static final int SOURCE_FIELD_NUMBER = 1; private java.lang.Object source_; + /** + * optional string source = 1; + */ public boolean hasSource() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getSource() { + /** + * optional string source = 1; + */ + public java.lang.String getSource() { java.lang.Object ref = source_; - if (ref instanceof String) { - return (String) ref; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { source_ = s; } return s; } } - private com.google.protobuf.ByteString getSourceBytes() { + /** + * optional string source = 1; + */ + public com.google.protobuf.ByteString + getSourceBytes() { java.lang.Object ref = source_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); source_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + // optional .GenericExceptionMessage genericException = 2; public static final int GENERICEXCEPTION_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage genericException_; + /** + * optional .GenericExceptionMessage genericException = 2; + */ public boolean hasGenericException() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional .GenericExceptionMessage genericException = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage getGenericException() { return genericException_; } + /** + * optional .GenericExceptionMessage genericException = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessageOrBuilder getGenericExceptionOrBuilder() { return genericException_; } - + private void initFields() { source_ = ""; genericException_ = org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage.getDefaultInstance(); @@ -1641,11 +2320,11 @@ public final class ErrorHandlingProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -1657,12 +2336,12 @@ public final class ErrorHandlingProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -1676,14 +2355,14 @@ public final class 
ErrorHandlingProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -1693,7 +2372,7 @@ public final class ErrorHandlingProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage other = (org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage) obj; - + boolean result = true; result = result && (hasSource() == other.hasSource()); if (hasSource()) { @@ -1709,9 +2388,13 @@ public final class ErrorHandlingProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasSource()) { @@ -1723,89 +2406,85 @@ public final class ErrorHandlingProtos { hash = (53 * hash) + getGenericException().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return 
builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code ForeignExceptionMessage} + * + *
+     **
+     * Exception sent across the wire when a remote task needs
+     * to notify other tasks that it failed and why
+     * </pre>
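+     *
+     * Illustrative builder sketch, not part of the protoc output; it assumes
+     * the usual generated setter for GenericExceptionMessage's className
+     * field. Both fields are optional, so build() needs no prior setters.
+     * <pre>
+     * ForeignExceptionMessage.Builder b = ForeignExceptionMessage.newBuilder();
+     * b.setSource("master");   // hypothetical source
+     * b.getGenericExceptionBuilder().setClassName("java.io.IOException");
+     * ForeignExceptionMessage msg = b.build();
+     * </pre>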
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessageOrBuilder { @@ -1813,18 +2492,21 @@ public final class ErrorHandlingProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_ForeignExceptionMessage_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_ForeignExceptionMessage_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_ForeignExceptionMessage_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.class, org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -1836,7 +2518,7 @@ public final class ErrorHandlingProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); source_ = ""; @@ -1849,20 +2531,20 @@ public final class ErrorHandlingProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_ForeignExceptionMessage_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage build() { org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage result = buildPartial(); if (!result.isInitialized()) { @@ -1870,17 +2552,7 @@ public final class ErrorHandlingProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage result = new org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage(this); int from_bitField0_ = bitField0_; @@ -1901,7 +2573,7 @@ public final class ErrorHandlingProtos { onBuilt(); return result; } - + public Builder 
mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage)other); @@ -1910,11 +2582,13 @@ public final class ErrorHandlingProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage other) { if (other == org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.getDefaultInstance()) return this; if (other.hasSource()) { - setSource(other.getSource()); + bitField0_ |= 0x00000001; + source_ = other.source_; + onChanged(); } if (other.hasGenericException()) { mergeGenericException(other.getGenericException()); @@ -1922,70 +2596,73 @@ public final class ErrorHandlingProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - source_ = input.readBytes(); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage.newBuilder(); - if (hasGenericException()) { - subBuilder.mergeFrom(getGenericException()); - } - input.readMessage(subBuilder, extensionRegistry); - setGenericException(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // optional string source = 1; private java.lang.Object source_ = ""; + /** + * optional string source = 1; + */ public boolean hasSource() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getSource() { + /** + * optional string source = 1; + */ + public java.lang.String getSource() { java.lang.Object ref = source_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); source_ = s; return s; } else { - return (String) ref; + return (java.lang.String) ref; } } - public Builder setSource(String value) { + /** + * optional string source = 1; + */ + public com.google.protobuf.ByteString + getSourceBytes() { + java.lang.Object ref = source_; + if (ref instanceof String) 
{ + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + source_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string source = 1; + */ + public Builder setSource( + java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ -1994,25 +2671,42 @@ public final class ErrorHandlingProtos { onChanged(); return this; } + /** + * optional string source = 1; + */ public Builder clearSource() { bitField0_ = (bitField0_ & ~0x00000001); source_ = getDefaultInstance().getSource(); onChanged(); return this; } - void setSource(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; + /** + * optional string source = 1; + */ + public Builder setSourceBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; source_ = value; onChanged(); + return this; } - + // optional .GenericExceptionMessage genericException = 2; private org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage genericException_ = org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage, org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage.Builder, org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessageOrBuilder> genericExceptionBuilder_; + /** + * optional .GenericExceptionMessage genericException = 2; + */ public boolean hasGenericException() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional .GenericExceptionMessage genericException = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage getGenericException() { if (genericExceptionBuilder_ == null) { return genericException_; @@ -2020,6 +2714,9 @@ public final class ErrorHandlingProtos { return genericExceptionBuilder_.getMessage(); } } + /** + * optional .GenericExceptionMessage genericException = 2; + */ public Builder setGenericException(org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage value) { if (genericExceptionBuilder_ == null) { if (value == null) { @@ -2033,6 +2730,9 @@ public final class ErrorHandlingProtos { bitField0_ |= 0x00000002; return this; } + /** + * optional .GenericExceptionMessage genericException = 2; + */ public Builder setGenericException( org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage.Builder builderForValue) { if (genericExceptionBuilder_ == null) { @@ -2044,6 +2744,9 @@ public final class ErrorHandlingProtos { bitField0_ |= 0x00000002; return this; } + /** + * optional .GenericExceptionMessage genericException = 2; + */ public Builder mergeGenericException(org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage value) { if (genericExceptionBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && @@ -2060,6 +2763,9 @@ public final class ErrorHandlingProtos { bitField0_ |= 0x00000002; return this; } + /** + * optional .GenericExceptionMessage genericException = 2; + */ public Builder clearGenericException() { if (genericExceptionBuilder_ == null) { genericException_ = 
org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage.getDefaultInstance(); @@ -2070,11 +2776,17 @@ public final class ErrorHandlingProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } + /** + * optional .GenericExceptionMessage genericException = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage.Builder getGenericExceptionBuilder() { bitField0_ |= 0x00000002; onChanged(); return getGenericExceptionFieldBuilder().getBuilder(); } + /** + * optional .GenericExceptionMessage genericException = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessageOrBuilder getGenericExceptionOrBuilder() { if (genericExceptionBuilder_ != null) { return genericExceptionBuilder_.getMessageOrBuilder(); @@ -2082,6 +2794,9 @@ public final class ErrorHandlingProtos { return genericException_; } } + /** + * optional .GenericExceptionMessage genericException = 2; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage, org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage.Builder, org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessageOrBuilder> getGenericExceptionFieldBuilder() { @@ -2095,18 +2810,18 @@ public final class ErrorHandlingProtos { } return genericExceptionBuilder_; } - + // @@protoc_insertion_point(builder_scope:ForeignExceptionMessage) } - + static { defaultInstance = new ForeignExceptionMessage(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:ForeignExceptionMessage) } - + private static com.google.protobuf.Descriptors.Descriptor internal_static_StackTraceElementMessage_descriptor; private static @@ -2122,7 +2837,7 @@ public final class ErrorHandlingProtos { private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_ForeignExceptionMessage_fieldAccessorTable; - + public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; @@ -2153,25 +2868,19 @@ public final class ErrorHandlingProtos { internal_static_StackTraceElementMessage_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_StackTraceElementMessage_descriptor, - new java.lang.String[] { "DeclaringClass", "MethodName", "FileName", "LineNumber", }, - org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage.class, - org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage.Builder.class); + new java.lang.String[] { "DeclaringClass", "MethodName", "FileName", "LineNumber", }); internal_static_GenericExceptionMessage_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_GenericExceptionMessage_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_GenericExceptionMessage_descriptor, - new java.lang.String[] { "ClassName", "Message", "ErrorInfo", "Trace", }, - org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage.class, - org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage.Builder.class); + new java.lang.String[] { "ClassName", "Message", "ErrorInfo", "Trace", }); internal_static_ForeignExceptionMessage_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_ForeignExceptionMessage_fieldAccessorTable = new 
com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ForeignExceptionMessage_descriptor, - new java.lang.String[] { "Source", "GenericException", }, - org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.class, - org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.Builder.class); + new java.lang.String[] { "Source", "GenericException", }); return null; } }; @@ -2180,6 +2889,6 @@ public final class ErrorHandlingProtos { new com.google.protobuf.Descriptors.FileDescriptor[] { }, assigner); } - + // @@protoc_insertion_point(outer_class_scope) } diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/FSProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/FSProtos.java index 79d13c1..76226b4 100644 --- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/FSProtos.java +++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/FSProtos.java @@ -10,72 +10,166 @@ public final class FSProtos { } public interface HBaseVersionFileContentOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required string version = 1; + /** + * required string version = 1; + */ boolean hasVersion(); - String getVersion(); + /** + * required string version = 1; + */ + java.lang.String getVersion(); + /** + * required string version = 1; + */ + com.google.protobuf.ByteString + getVersionBytes(); } + /** + * Protobuf type {@code HBaseVersionFileContent} + * + *
+   **
+   * The ${HBASE_ROOTDIR}/hbase.version file content
+   * </pre>
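+   *
+   * Illustrative parse sketch, not part of the protoc output: callers that
+   * previously used newBuilder().mergeFrom(in).buildParsed() can read the
+   * content straight off a stream via the static PARSER. The version string
+   * is hypothetical.
+   * <pre>
+   * byte[] bytes =
+   *     HBaseVersionFileContent.newBuilder().setVersion("8").build().toByteArray();
+   * // parseFrom(InputStream) declares IOException
+   * HBaseVersionFileContent content =
+   *     HBaseVersionFileContent.PARSER.parseFrom(new java.io.ByteArrayInputStream(bytes));
+   * String version = content.getVersion();
+   * </pre>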
+ */ public static final class HBaseVersionFileContent extends com.google.protobuf.GeneratedMessage implements HBaseVersionFileContentOrBuilder { // Use HBaseVersionFileContent.newBuilder() to construct. - private HBaseVersionFileContent(Builder builder) { + private HBaseVersionFileContent(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private HBaseVersionFileContent(boolean noInit) {} - + private HBaseVersionFileContent(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final HBaseVersionFileContent defaultInstance; public static HBaseVersionFileContent getDefaultInstance() { return defaultInstance; } - + public HBaseVersionFileContent getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private HBaseVersionFileContent( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + version_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_HBaseVersionFileContent_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_HBaseVersionFileContent_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_HBaseVersionFileContent_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.class, org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.Builder.class); } - + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public HBaseVersionFileContent parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new HBaseVersionFileContent(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + private int bitField0_; // required string version = 1; public static final int VERSION_FIELD_NUMBER = 1; private java.lang.Object version_; + /** + * required string version = 1; + */ public 
boolean hasVersion() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getVersion() { + /** + * required string version = 1; + */ + public java.lang.String getVersion() { java.lang.Object ref = version_; - if (ref instanceof String) { - return (String) ref; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { version_ = s; } return s; } } - private com.google.protobuf.ByteString getVersionBytes() { + /** + * required string version = 1; + */ + public com.google.protobuf.ByteString + getVersionBytes() { java.lang.Object ref = version_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); version_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + private void initFields() { version_ = ""; } @@ -83,7 +177,7 @@ public final class FSProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasVersion()) { memoizedIsInitialized = 0; return false; @@ -91,7 +185,7 @@ public final class FSProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -100,12 +194,12 @@ public final class FSProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -115,14 +209,14 @@ public final class FSProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -132,7 +226,7 @@ public final class FSProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent other = (org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent) obj; - + boolean result = true; result = result && (hasVersion() == other.hasVersion()); if (hasVersion()) { @@ -143,9 +237,13 @@ public final class FSProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasVersion()) { @@ -153,89 +251,84 @@ public final class FSProtos { hash = (53 * hash) + getVersion().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return 
PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = 
new Builder(parent); return builder; } + /** + * Protobuf type {@code HBaseVersionFileContent} + * + *
+     **
+     * The ${HBASE_ROOTDIR}/hbase.version file content
+     * </pre>
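+     *
+     * Illustrative sketch, not part of the protoc output: version is a
+     * required field, so build() throws UninitializedMessageException until
+     * it is set, while buildPartial() skips that check. The value is
+     * hypothetical.
+     * <pre>
+     * HBaseVersionFileContent.Builder b = HBaseVersionFileContent.newBuilder();
+     * // b.isInitialized() is false here: required "version" is unset
+     * b.setVersion("8");
+     * HBaseVersionFileContent c = b.build();
+     * </pre>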
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContentOrBuilder { @@ -243,18 +336,21 @@ public final class FSProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_HBaseVersionFileContent_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_HBaseVersionFileContent_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_HBaseVersionFileContent_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.class, org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -265,27 +361,27 @@ public final class FSProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); version_ = ""; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_HBaseVersionFileContent_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent build() { org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent result = buildPartial(); if (!result.isInitialized()) { @@ -293,17 +389,7 @@ public final class FSProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent buildPartial() { org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent result = new org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent(this); int from_bitField0_ = bitField0_; @@ -316,7 +402,7 @@ public final class FSProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent)other); @@ -325,16 +411,18 @@ public final class FSProtos { return this; } } - + public 
Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent other) { if (other == org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.getDefaultInstance()) return this; if (other.hasVersion()) { - setVersion(other.getVersion()); + bitField0_ |= 0x00000001; + version_ = other.version_; + onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasVersion()) { @@ -342,57 +430,69 @@ public final class FSProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - version_ = input.readBytes(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required string version = 1; private java.lang.Object version_ = ""; + /** + * required string version = 1; + */ public boolean hasVersion() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getVersion() { + /** + * required string version = 1; + */ + public java.lang.String getVersion() { java.lang.Object ref = version_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); version_ = s; return s; } else { - return (String) ref; + return (java.lang.String) ref; + } + } + /** + * required string version = 1; + */ + public com.google.protobuf.ByteString + getVersionBytes() { + java.lang.Object ref = version_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + version_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; } } - public Builder setVersion(String value) { + /** + * required string version = 1; + */ + public Builder setVersion( + java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ -401,80 +501,201 @@ public final class FSProtos { onChanged(); return this; } + /** + * required string version = 1; + */ public Builder clearVersion() { bitField0_ = (bitField0_ & ~0x00000001); version_ = getDefaultInstance().getVersion(); onChanged(); return this; } - void setVersion(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; + /** + * required string version = 1; + */ + public Builder setVersionBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new 
NullPointerException(); + } + bitField0_ |= 0x00000001; version_ = value; onChanged(); + return this; } - + // @@protoc_insertion_point(builder_scope:HBaseVersionFileContent) } - + static { defaultInstance = new HBaseVersionFileContent(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:HBaseVersionFileContent) } - + public interface ReferenceOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required bytes splitkey = 1; + /** + * required bytes splitkey = 1; + */ boolean hasSplitkey(); + /** + * required bytes splitkey = 1; + */ com.google.protobuf.ByteString getSplitkey(); - + // required .Reference.Range range = 2; + /** + * required .Reference.Range range = 2; + */ boolean hasRange(); + /** + * required .Reference.Range range = 2; + */ org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range getRange(); } + /** + * Protobuf type {@code Reference} + * + *
+   **
+   * Reference file content used when we split an hfile under a region.
+   * </pre>
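+   *
+   * Illustrative sketch, not part of the protoc output: a Reference names
+   * the parent hfile's split point and which half this reference covers.
+   * The row key is hypothetical.
+   * <pre>
+   * Reference top = Reference.newBuilder()
+   *     .setSplitkey(com.google.protobuf.ByteString.copyFromUtf8("row-0042"))
+   *     .setRange(Reference.Range.TOP)
+   *     .build();
+   * </pre>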
+ */ public static final class Reference extends com.google.protobuf.GeneratedMessage implements ReferenceOrBuilder { // Use Reference.newBuilder() to construct. - private Reference(Builder builder) { + private Reference(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private Reference(boolean noInit) {} - + private Reference(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final Reference defaultInstance; public static Reference getDefaultInstance() { return defaultInstance; } - + public Reference getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private Reference( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + splitkey_ = input.readBytes(); + break; + } + case 16: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range value = org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(2, rawValue); + } else { + bitField0_ |= 0x00000002; + range_ = value; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_Reference_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_Reference_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_Reference_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.class, org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public Reference parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new Reference(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + + /** + * Protobuf enum {@code Reference.Range} + */ public enum Range implements 
com.google.protobuf.ProtocolMessageEnum { + /** + * TOP = 0; + */ TOP(0, 0), + /** + * BOTTOM = 1; + */ BOTTOM(1, 1), ; - + + /** + * TOP = 0; + */ public static final int TOP_VALUE = 0; + /** + * BOTTOM = 1; + */ public static final int BOTTOM_VALUE = 1; - - + + public final int getNumber() { return value; } - + public static Range valueOf(int value) { switch (value) { case 0: return TOP; @@ -482,7 +703,7 @@ public final class FSProtos { default: return null; } } - + public static com.google.protobuf.Internal.EnumLiteMap internalGetValueMap() { return internalValueMap; @@ -494,7 +715,7 @@ public final class FSProtos { return Range.valueOf(number); } }; - + public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(index); @@ -507,11 +728,9 @@ public final class FSProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.getDescriptor().getEnumTypes().get(0); } - - private static final Range[] VALUES = { - TOP, BOTTOM, - }; - + + private static final Range[] VALUES = values(); + public static Range valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { @@ -520,39 +739,51 @@ public final class FSProtos { } return VALUES[desc.getIndex()]; } - + private final int index; private final int value; - + private Range(int index, int value) { this.index = index; this.value = value; } - + // @@protoc_insertion_point(enum_scope:Reference.Range) } - + private int bitField0_; // required bytes splitkey = 1; public static final int SPLITKEY_FIELD_NUMBER = 1; private com.google.protobuf.ByteString splitkey_; + /** + * required bytes splitkey = 1; + */ public boolean hasSplitkey() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes splitkey = 1; + */ public com.google.protobuf.ByteString getSplitkey() { return splitkey_; } - + // required .Reference.Range range = 2; public static final int RANGE_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range range_; + /** + * required .Reference.Range range = 2; + */ public boolean hasRange() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required .Reference.Range range = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range getRange() { return range_; } - + private void initFields() { splitkey_ = com.google.protobuf.ByteString.EMPTY; range_ = org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range.TOP; @@ -561,7 +792,7 @@ public final class FSProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasSplitkey()) { memoizedIsInitialized = 0; return false; @@ -573,7 +804,7 @@ public final class FSProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -585,12 +816,12 @@ public final class FSProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -604,14 +835,14 @@ public final class FSProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected 
java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -621,7 +852,7 @@ public final class FSProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference other = (org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference) obj; - + boolean result = true; result = result && (hasSplitkey() == other.hasSplitkey()); if (hasSplitkey()) { @@ -637,9 +868,13 @@ public final class FSProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasSplitkey()) { @@ -651,89 +886,84 @@ public final class FSProtos { hash = (53 * hash) + hashEnum(getRange()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return 
builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code Reference} + * + *
+     **
+     * Reference file content used when we split an hfile under a region.
+     * </pre>
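+     *
+     * Illustrative sketch, not part of the protoc output: with the 2.5
+     * codegen the builder's mergeFrom(CodedInputStream) delegates to
+     * PARSER.parsePartialFrom and, on failure, merges whatever was parsed
+     * before rethrowing. The splitkey below is a placeholder.
+     * <pre>
+     * Reference src = Reference.newBuilder()
+     *     .setSplitkey(com.google.protobuf.ByteString.EMPTY)
+     *     .setRange(Reference.Range.BOTTOM)
+     *     .build();
+     * // mergeFrom(CodedInputStream) declares IOException
+     * Reference copy = Reference.newBuilder()
+     *     .mergeFrom(com.google.protobuf.CodedInputStream.newInstance(src.toByteArray()))
+     *     .build();
+     * </pre>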
+     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.FSProtos.ReferenceOrBuilder {
@@ -741,18 +971,21 @@ public final class FSProtos {
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_Reference_descriptor;
      }
-
+
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_Reference_fieldAccessorTable;
+        return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_Reference_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.class, org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Builder.class);
      }
-
+
      // Construct using org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
-
-      private Builder(BuilderParent parent) {
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
@@ -763,7 +996,7 @@ public final class FSProtos {
      private static Builder create() {
        return new Builder();
      }
-
+
      public Builder clear() {
        super.clear();
        splitkey_ = com.google.protobuf.ByteString.EMPTY;
@@ -772,20 +1005,20 @@ public final class FSProtos {
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }
-
+
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
-
+
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.getDescriptor();
+        return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_Reference_descriptor;
      }
-
+
      public org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.getDefaultInstance();
      }
-
+
      public org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference build() {
        org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference result = buildPartial();
        if (!result.isInitialized()) {
@@ -793,17 +1026,7 @@ public final class FSProtos {
        }
        return result;
      }
-
-      private org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference buildParsed()
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(
-            result).asInvalidProtocolBufferException();
-        }
-        return result;
-      }
-
+
      public org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference result = new org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference(this);
        int from_bitField0_ = bitField0_;
@@ -820,7 +1043,7 @@ public final class FSProtos {
        onBuilt();
        return result;
      }
-
+
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference)other);
@@ -829,7 +1052,7 @@ public final class FSProtos {
          return this;
        }
      }
-
+
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.getDefaultInstance()) return this;
        if (other.hasSplitkey()) {
@@ -841,7 +1064,7 @@ public final class FSProtos {
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
-
+
      public final boolean isInitialized() {
        if (!hasSplitkey()) {
@@ -853,60 +1076,43 @@ public final class FSProtos {
        }
        return true;
      }
-
+
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
-        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder(
-            this.getUnknownFields());
-        while (true) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              this.setUnknownFields(unknownFields.build());
-              onChanged();
-              return this;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                this.setUnknownFields(unknownFields.build());
-                onChanged();
-                return this;
-              }
-              break;
-            }
-            case 10: {
-              bitField0_ |= 0x00000001;
-              splitkey_ = input.readBytes();
-              break;
-            }
-            case 16: {
-              int rawValue = input.readEnum();
-              org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range value = org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range.valueOf(rawValue);
-              if (value == null) {
-                unknownFields.mergeVarintField(2, rawValue);
-              } else {
-                bitField0_ |= 0x00000002;
-                range_ = value;
-              }
-              break;
-            }
+        org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
          }
        }
+        return this;
      }
      private int bitField0_;
-
+
      // required bytes splitkey = 1;
      private com.google.protobuf.ByteString splitkey_ = com.google.protobuf.ByteString.EMPTY;
+      /**
+       * required bytes splitkey = 1;
+       */
      public boolean hasSplitkey() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
+      /**
+       * required bytes splitkey = 1;
+       */
      public com.google.protobuf.ByteString getSplitkey() {
        return splitkey_;
      }
+      /**
+       * required bytes splitkey = 1;
+       */
      public Builder setSplitkey(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
@@ -916,21 +1122,33 @@ public final class FSProtos {
        onChanged();
        return this;
      }
+      /**
+       * required bytes splitkey = 1;
+       */
      public Builder clearSplitkey() {
        bitField0_ = (bitField0_ & ~0x00000001);
        splitkey_ = getDefaultInstance().getSplitkey();
        onChanged();
        return this;
      }
-
+
      // required .Reference.Range range = 2;
      private org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range range_ = org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range.TOP;
+      /**
+       * required .Reference.Range range = 2;
+       */
      public boolean hasRange() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
+      /**
+       * required .Reference.Range range = 2;
+       */
      public org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range getRange() {
        return range_;
      }
+      /**
+       * required .Reference.Range range = 2;
+       */
      public Builder setRange(org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range value) {
        if (value == null) {
          throw new NullPointerException();
        }
@@ -940,24 +1158,27 @@ public final class FSProtos {
        onChanged();
        return this;
      }
+      /**
+       * required .Reference.Range range = 2;
+       */
      public Builder clearRange() {
        bitField0_ = (bitField0_ & ~0x00000002);
        range_ = org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range.TOP;
        onChanged();
        return this;
      }
-
+
      // @@protoc_insertion_point(builder_scope:Reference)
    }
-
+
    static {
      defaultInstance = new Reference(true);
      defaultInstance.initFields();
    }
-
+
    // @@protoc_insertion_point(class_scope:Reference)
  }
-
+
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_HBaseVersionFileContent_descriptor;
  private static
@@ -968,7 +1189,7 @@ public final class FSProtos {
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_Reference_fieldAccessorTable;
-
+
  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
@@ -994,17 +1215,13 @@ public final class FSProtos {
      internal_static_HBaseVersionFileContent_fieldAccessorTable = new
        com.google.protobuf.GeneratedMessage.FieldAccessorTable(
          internal_static_HBaseVersionFileContent_descriptor,
-          new java.lang.String[] { "Version", },
-          org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.class,
-          org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.Builder.class);
+          new java.lang.String[] { "Version", });
      internal_static_Reference_descriptor =
        getDescriptor().getMessageTypes().get(1);
      internal_static_Reference_fieldAccessorTable = new
        com.google.protobuf.GeneratedMessage.FieldAccessorTable(
          internal_static_Reference_descriptor,
-          new java.lang.String[] { "Splitkey", "Range", },
-          org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.class,
-          org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Builder.class);
+          new java.lang.String[] { "Splitkey", "Range", });
      return null;
    }
  };
@@ -1013,6 +1230,6 @@ public final class FSProtos {
      new com.google.protobuf.Descriptors.FileDescriptor[] {
      }, assigner);
  }
-
+
  // @@protoc_insertion_point(outer_class_scope)
}
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/FilterProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/FilterProtos.java
index 0e168e2..30feabb 100644
--- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/FilterProtos.java
+++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/FilterProtos.java
@@ -10,50 +10,129 @@ public final class FilterProtos {
  }
  public interface ColumnCountGetFilterOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
-
+
    // required int32 limit = 1;
+    /**
+     * required int32 limit = 1;
+     */
    boolean hasLimit();
+    /**
+     * required int32 limit = 1;
+     */
    int getLimit();
  }
+  /**
+   * Protobuf type {@code ColumnCountGetFilter}
+   */
  public static final class ColumnCountGetFilter extends
      com.google.protobuf.GeneratedMessage
      implements ColumnCountGetFilterOrBuilder {
    // Use ColumnCountGetFilter.newBuilder() to construct.
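/*
 * A caller-side sketch of the FSProtos change above, assuming protobuf-java
 * 2.5.x on the classpath: the public parseFrom() entry points keep their
 * signatures, but they now delegate to the generated static PARSER instead of
 * the removed buildParsed() helper, so existing callers compile unchanged.
 * The splitkey bytes below are made up purely for illustration.
 */
import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference;

public class ReferenceRoundTrip {
  public static void main(String[] args) throws Exception {
    Reference ref = Reference.newBuilder()
        .setSplitkey(ByteString.copyFromUtf8("row-0042"))  // required field
        .setRange(Reference.Range.TOP)                     // required field
        .build();
    byte[] wire = ref.toByteArray();
    // Both of these now hit the generated PARSER underneath.
    Reference viaStatic = Reference.parseFrom(wire);
    Reference viaParser = Reference.PARSER.parseFrom(wire);
    System.out.println(viaStatic.equals(viaParser));  // true
  }
}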
-    private ColumnCountGetFilter(Builder builder) {
+    private ColumnCountGetFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
+      this.unknownFields = builder.getUnknownFields();
    }
-    private ColumnCountGetFilter(boolean noInit) {}
-
+    private ColumnCountGetFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
    private static final ColumnCountGetFilter defaultInstance;
    public static ColumnCountGetFilter getDefaultInstance() {
      return defaultInstance;
    }
-
+
    public ColumnCountGetFilter getDefaultInstanceForType() {
      return defaultInstance;
    }
-
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private ColumnCountGetFilter(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 8: {
+              bitField0_ |= 0x00000001;
+              limit_ = input.readInt32();
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnCountGetFilter_descriptor;
    }
-
+
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnCountGetFilter_fieldAccessorTable;
+      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnCountGetFilter_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<ColumnCountGetFilter> PARSER =
+        new com.google.protobuf.AbstractParser<ColumnCountGetFilter>() {
+      public ColumnCountGetFilter parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new ColumnCountGetFilter(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<ColumnCountGetFilter> getParserForType() {
+      return PARSER;
    }
-
+
    private int bitField0_;
    // required int32 limit = 1;
    public static final int LIMIT_FIELD_NUMBER = 1;
    private int limit_;
+    /**
+     * required int32 limit = 1;
+     */
    public boolean hasLimit() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
+    /**
+     * required int32 limit = 1;
+     */
    public int getLimit() {
      return limit_;
    }
-
+
    private void initFields() {
      limit_ = 0;
    }
@@ -61,7 +140,7 @@ public final class FilterProtos {
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
-
+
      if (!hasLimit()) {
        memoizedIsInitialized = 0;
        return false;
@@ -69,7 +148,7 @@
      memoizedIsInitialized = 1;
      return true;
    }
-
+
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
@@ -78,12 +157,12 @@
      }
      getUnknownFields().writeTo(output);
    }
-
+
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
-
+
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
@@ -93,14 +172,14 @@
      memoizedSerializedSize = size;
      return size;
    }
-
+
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
-
+
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
@@ -110,7 +189,7 @@
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter) obj;
-
+
      boolean result = true;
      result = result && (hasLimit() == other.hasLimit());
      if (hasLimit()) {
@@ -121,9 +200,13 @@
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
-
+
+    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasLimit()) {
@@ -131,89 +214,79 @@
        hash = (53 * hash) + getLimit();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
      return hash;
    }
-
+
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom(java.io.InputStream input)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
    }
-
+
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
-
+
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
+    /**
+     * Protobuf type {@code ColumnCountGetFilter}
+     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilterOrBuilder {
@@ -221,18 +294,21 @@ public final class FilterProtos {
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnCountGetFilter_descriptor;
      }
-
+
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnCountGetFilter_fieldAccessorTable;
+        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnCountGetFilter_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.Builder.class);
      }
-
+
      // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
-
-      private Builder(BuilderParent parent) {
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
@@ -243,27 +319,27 @@ public final class FilterProtos {
      private static Builder create() {
        return new Builder();
      }
-
+
      public Builder clear() {
        super.clear();
        limit_ = 0;
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
-
+
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
-
+
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.getDescriptor();
+        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnCountGetFilter_descriptor;
      }
-
+
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.getDefaultInstance();
      }
-
+
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter build() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter result = buildPartial();
        if (!result.isInitialized()) {
@@ -271,17 +347,7 @@ public final class FilterProtos {
        }
        return result;
      }
-
-      private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter buildParsed()
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(
-            result).asInvalidProtocolBufferException();
-        }
-        return result;
-      }
-
+
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter(this);
        int from_bitField0_ = bitField0_;
@@ -294,7 +360,7 @@ public final class FilterProtos {
        onBuilt();
        return result;
      }
-
+
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter)other);
@@ -303,7 +369,7 @@ public final class FilterProtos {
          return this;
        }
      }
-
+
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.getDefaultInstance()) return this;
        if (other.hasLimit()) {
@@ -312,7 +378,7 @@ public final class FilterProtos {
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
-
+
      public final boolean isInitialized() {
        if (!hasLimit()) {
@@ -320,133 +386,226 @@ public final class FilterProtos {
        }
        return true;
      }
-
+
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
-        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder(
-            this.getUnknownFields());
-        while (true) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              this.setUnknownFields(unknownFields.build());
-              onChanged();
-              return this;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                this.setUnknownFields(unknownFields.build());
-                onChanged();
-                return this;
-              }
-              break;
-            }
-            case 8: {
-              bitField0_ |= 0x00000001;
-              limit_ = input.readInt32();
-              break;
-            }
+        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
          }
        }
+        return this;
      }
-
      private int bitField0_;
-
+
      // required int32 limit = 1;
      private int limit_ ;
+      /**
+       * required int32 limit = 1;
+       */
      public boolean hasLimit() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
+      /**
+       * required int32 limit = 1;
+       */
      public int getLimit() {
        return limit_;
      }
+      /**
+       * required int32 limit = 1;
+       */
      public Builder setLimit(int value) {
        bitField0_ |= 0x00000001;
        limit_ = value;
        onChanged();
        return this;
      }
+      /**
+       * required int32 limit = 1;
+       */
      public Builder clearLimit() {
        bitField0_ = (bitField0_ & ~0x00000001);
        limit_ = 0;
        onChanged();
        return this;
      }
-
+
      // @@protoc_insertion_point(builder_scope:ColumnCountGetFilter)
    }
-
+
    static {
      defaultInstance = new ColumnCountGetFilter(true);
      defaultInstance.initFields();
    }
-
+
    // @@protoc_insertion_point(class_scope:ColumnCountGetFilter)
  }
-
+
  public interface ColumnPaginationFilterOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
-
+
    // required int32 limit = 1;
+    /**
+     * required int32 limit = 1;
+     */
    boolean hasLimit();
+    /**
+     * required int32 limit = 1;
+     */
    int getLimit();
-
+
    // optional int32 offset = 2;
+    /**
+     * optional int32 offset = 2;
+     */
    boolean hasOffset();
+    /**
+     * optional int32 offset = 2;
+     */
    int getOffset();
  }
+  /**
+   * Protobuf type {@code ColumnPaginationFilter}
+   */
  public static final class ColumnPaginationFilter extends
      com.google.protobuf.GeneratedMessage
      implements ColumnPaginationFilterOrBuilder {
    // Use ColumnPaginationFilter.newBuilder() to construct.
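/*
 * A sketch of what the rewritten builder mergeFrom() above buys callers,
 * assuming protobuf-java 2.5.x: InvalidProtocolBufferException now carries the
 * partially read message via getUnfinishedMessage(), e.g. when a required
 * field such as ColumnCountGetFilter.limit is missing. The empty input below
 * is used purely for illustration.
 */
import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter;

public class UnfinishedMessageDemo {
  public static void main(String[] args) {
    try {
      // Zero bytes decode to an empty message, so the required 'limit'
      // field is absent and parseFrom() throws.
      ColumnCountGetFilter.parseFrom(new byte[0]);
    } catch (InvalidProtocolBufferException e) {
      // New in protobuf 2.5: the partial result rides along on the exception.
      com.google.protobuf.MessageLite partial = e.getUnfinishedMessage();
      System.out.println("parse failed: " + e.getMessage());
      System.out.println("unfinished message: " + partial);
    }
  }
}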
-    private ColumnPaginationFilter(Builder builder) {
+    private ColumnPaginationFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
+      this.unknownFields = builder.getUnknownFields();
    }
-    private ColumnPaginationFilter(boolean noInit) {}
-
+    private ColumnPaginationFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
    private static final ColumnPaginationFilter defaultInstance;
    public static ColumnPaginationFilter getDefaultInstance() {
      return defaultInstance;
    }
-
+
    public ColumnPaginationFilter getDefaultInstanceForType() {
      return defaultInstance;
    }
-
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private ColumnPaginationFilter(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 8: {
+              bitField0_ |= 0x00000001;
+              limit_ = input.readInt32();
+              break;
+            }
+            case 16: {
+              bitField0_ |= 0x00000002;
+              offset_ = input.readInt32();
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPaginationFilter_descriptor;
    }
-
+
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPaginationFilter_fieldAccessorTable;
+      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPaginationFilter_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<ColumnPaginationFilter> PARSER =
+        new com.google.protobuf.AbstractParser<ColumnPaginationFilter>() {
+      public ColumnPaginationFilter parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new ColumnPaginationFilter(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<ColumnPaginationFilter> getParserForType() {
+      return PARSER;
    }
-
+
    private int bitField0_;
    // required int32 limit = 1;
    public static final int LIMIT_FIELD_NUMBER = 1;
    private int limit_;
+    /**
+     * required int32 limit = 1;
+     */
    public boolean hasLimit() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
+    /**
+     * required int32 limit = 1;
+     */
    public int getLimit() {
      return limit_;
    }
-
+
    // optional int32 offset = 2;
    public static final int OFFSET_FIELD_NUMBER = 2;
    private int offset_;
+    /**
+     * optional int32 offset = 2;
+     */
    public boolean hasOffset() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
+    /**
+     * optional int32 offset = 2;
+     */
    public int getOffset() {
      return offset_;
    }
-
+
    private void initFields() {
      limit_ = 0;
      offset_ = 0;
@@ -455,7 +614,7 @@ public final class FilterProtos {
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
-
+
      if (!hasLimit()) {
        memoizedIsInitialized = 0;
        return false;
@@ -463,7 +622,7 @@
      memoizedIsInitialized = 1;
      return true;
    }
-
+
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
@@ -475,12 +634,12 @@
      }
      getUnknownFields().writeTo(output);
    }
-
+
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
-
+
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
@@ -494,14 +653,14 @@
      memoizedSerializedSize = size;
      return size;
    }
-
+
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
-
+
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
@@ -511,7 +670,7 @@
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter) obj;
-
+
      boolean result = true;
      result = result && (hasLimit() == other.hasLimit());
      if (hasLimit()) {
@@ -527,9 +686,13 @@
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
-
+
+    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasLimit()) {
@@ -541,89 +704,79 @@
        hash = (53 * hash) + getOffset();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
      return hash;
    }
-
+
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(java.io.InputStream input)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
    }
-
+
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
-
+
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
+    /**
+     * Protobuf type {@code ColumnPaginationFilter}
+     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilterOrBuilder {
@@ -631,18 +784,21 @@ public final class FilterProtos {
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPaginationFilter_descriptor;
      }
-
+
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPaginationFilter_fieldAccessorTable;
+        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPaginationFilter_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.Builder.class);
      }
-
+
      // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
-
-      private Builder(BuilderParent parent) {
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
@@ -653,7 +809,7 @@ public final class FilterProtos {
      private static Builder create() {
        return new Builder();
      }
-
+
      public Builder clear() {
        super.clear();
        limit_ = 0;
@@ -662,20 +818,20 @@ public final class FilterProtos {
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }
-
+
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
-
+
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.getDescriptor();
+        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPaginationFilter_descriptor;
      }
-
+
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.getDefaultInstance();
      }
-
+
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter build() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter result = buildPartial();
        if (!result.isInitialized()) {
@@ -683,17 +839,7 @@ public final class FilterProtos {
        }
        return result;
      }
-
-      private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter buildParsed()
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(
-            result).asInvalidProtocolBufferException();
-        }
-        return result;
-      }
-
+
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter(this);
        int from_bitField0_ = bitField0_;
@@ -710,7 +856,7 @@ public final class FilterProtos {
        onBuilt();
        return result;
      }
-
+
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter)other);
@@ -719,7 +865,7 @@ public final class FilterProtos {
          return this;
        }
      }
-
+
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.getDefaultInstance()) return this;
        if (other.hasLimit()) {
@@ -731,7 +877,7 @@ public final class FilterProtos {
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
-
+
      public final boolean isInitialized() {
        if (!hasLimit()) {
@@ -739,145 +885,228 @@ public final class FilterProtos {
        }
        return true;
      }
-
+
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
-        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder(
-            this.getUnknownFields());
-        while (true) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              this.setUnknownFields(unknownFields.build());
-              onChanged();
-              return this;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                this.setUnknownFields(unknownFields.build());
-                onChanged();
-                return this;
-              }
-              break;
-            }
-            case 8: {
-              bitField0_ |= 0x00000001;
-              limit_ = input.readInt32();
-              break;
-            }
-            case 16: {
-              bitField0_ |= 0x00000002;
-              offset_ = input.readInt32();
-              break;
-            }
+        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
          }
        }
+        return this;
      }
-
      private int bitField0_;
-
+
      // required int32 limit = 1;
      private int limit_ ;
+      /**
+       * required int32 limit = 1;
+       */
      public boolean hasLimit() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
+      /**
+       * required int32 limit = 1;
+       */
      public int getLimit() {
        return limit_;
      }
+      /**
+       * required int32 limit = 1;
+       */
      public Builder setLimit(int value) {
        bitField0_ |= 0x00000001;
        limit_ = value;
        onChanged();
        return this;
      }
+      /**
+       * required int32 limit = 1;
+       */
      public Builder clearLimit() {
        bitField0_ = (bitField0_ & ~0x00000001);
        limit_ = 0;
        onChanged();
        return this;
      }
-
+
      // optional int32 offset = 2;
      private int offset_ ;
+      /**
+       * optional int32 offset = 2;
+       */
      public boolean hasOffset() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
+      /**
+       * optional int32 offset = 2;
+       */
      public int getOffset() {
        return offset_;
      }
+      /**
+       * optional int32 offset = 2;
+       */
      public Builder setOffset(int value) {
        bitField0_ |= 0x00000002;
        offset_ = value;
        onChanged();
        return this;
      }
+      /**
+       * optional int32 offset = 2;
+       */
      public Builder clearOffset() {
        bitField0_ = (bitField0_ & ~0x00000002);
        offset_ = 0;
        onChanged();
        return this;
      }
-
+
      // @@protoc_insertion_point(builder_scope:ColumnPaginationFilter)
    }
-
+
    static {
      defaultInstance = new ColumnPaginationFilter(true);
      defaultInstance.initFields();
    }
-
+
    // @@protoc_insertion_point(class_scope:ColumnPaginationFilter)
  }
-
+
  public interface ColumnPrefixFilterOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
-
+
    // required bytes prefix = 1;
+    /**
+     * required bytes prefix = 1;
+     */
    boolean hasPrefix();
+    /**
+     * required bytes prefix = 1;
+     */
    com.google.protobuf.ByteString getPrefix();
  }
+  /**
+   * Protobuf type {@code ColumnPrefixFilter}
+   */
  public static final class ColumnPrefixFilter extends
      com.google.protobuf.GeneratedMessage
      implements ColumnPrefixFilterOrBuilder {
    // Use ColumnPrefixFilter.newBuilder() to construct.
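/*
 * A sketch of the parseDelimitedFrom() behavior preserved by the hunks above,
 * assuming protobuf-java 2.5.x: the hand-rolled builder loop is gone, but
 * PARSER.parseDelimitedFrom() still returns null at a clean end of stream, so
 * code that reads a sequence of length-prefixed messages keeps working.
 */
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter;

public class DelimitedStreamDemo {
  public static void main(String[] args) throws Exception {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    for (int page = 0; page < 3; page++) {
      ColumnPaginationFilter.newBuilder()
          .setLimit(25)           // required field
          .setOffset(page * 25)   // optional field
          .build()
          .writeDelimitedTo(out); // length-prefixed framing
    }
    ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
    ColumnPaginationFilter msg;
    // parseDelimitedFrom() yields null once the stream is exhausted.
    while ((msg = ColumnPaginationFilter.parseDelimitedFrom(in)) != null) {
      System.out.println("limit=" + msg.getLimit() + " offset=" + msg.getOffset());
    }
  }
}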
-    private ColumnPrefixFilter(Builder builder) {
+    private ColumnPrefixFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
+      this.unknownFields = builder.getUnknownFields();
    }
-    private ColumnPrefixFilter(boolean noInit) {}
-
+    private ColumnPrefixFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
    private static final ColumnPrefixFilter defaultInstance;
    public static ColumnPrefixFilter getDefaultInstance() {
      return defaultInstance;
    }
-
+
    public ColumnPrefixFilter getDefaultInstanceForType() {
      return defaultInstance;
    }
-
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private ColumnPrefixFilter(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 10: {
+              bitField0_ |= 0x00000001;
+              prefix_ = input.readBytes();
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPrefixFilter_descriptor;
    }
-
+
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPrefixFilter_fieldAccessorTable;
+      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPrefixFilter_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<ColumnPrefixFilter> PARSER =
+        new com.google.protobuf.AbstractParser<ColumnPrefixFilter>() {
+      public ColumnPrefixFilter parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new ColumnPrefixFilter(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<ColumnPrefixFilter> getParserForType() {
+      return PARSER;
    }
-
+
    private int bitField0_;
    // required bytes prefix = 1;
    public static final int PREFIX_FIELD_NUMBER = 1;
    private com.google.protobuf.ByteString prefix_;
+    /**
+     * required bytes prefix = 1;
+     */
    public boolean hasPrefix() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
+    /**
+     * required bytes prefix = 1;
+     */
    public com.google.protobuf.ByteString getPrefix() {
      return prefix_;
    }
-
+
    private void initFields() {
      prefix_ = com.google.protobuf.ByteString.EMPTY;
    }
@@ -885,7 +1114,7 @@ public final class FilterProtos {
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
-
+
      if (!hasPrefix()) {
        memoizedIsInitialized = 0;
        return false;
@@ -893,7 +1122,7 @@
      memoizedIsInitialized = 1;
      return true;
    }
-
+
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
@@ -902,12 +1131,12 @@
      }
      getUnknownFields().writeTo(output);
    }
-
+
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
-
+
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
@@ -917,14 +1146,14 @@
      memoizedSerializedSize = size;
      return size;
    }
-
+
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
-
+
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
@@ -934,7 +1163,7 @@
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter) obj;
-
+
      boolean result = true;
      result = result && (hasPrefix() == other.hasPrefix());
      if (hasPrefix()) {
@@ -945,9 +1174,13 @@
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
-
+
+    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasPrefix()) {
@@ -955,89 +1188,79 @@
        hash = (53 * hash) + getPrefix().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
      return hash;
    }
-
+
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom(java.io.InputStream input)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
    }
-
+
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
-
+
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
+    /**
+     * Protobuf type {@code ColumnPrefixFilter}
+     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilterOrBuilder {
@@ -1045,18 +1268,21 @@ public final class FilterProtos {
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPrefixFilter_descriptor;
      }
-
+
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPrefixFilter_fieldAccessorTable;
+        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPrefixFilter_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.Builder.class);
      }
-
+
      // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
-
-      private Builder(BuilderParent parent) {
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
@@ -1067,27 +1293,27 @@ public final class FilterProtos {
      private static Builder create() {
        return new Builder();
      }
-
+
      public Builder clear() {
        super.clear();
        prefix_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
-
+
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
-
+
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.getDescriptor();
+        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPrefixFilter_descriptor;
      }
-
+
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.getDefaultInstance();
      }
-
+
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter build() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter result = buildPartial();
        if (!result.isInitialized()) {
@@ -1095,17 +1321,7 @@ public final class FilterProtos {
        }
        return result;
      }
-
-      private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter buildParsed()
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(
-            result).asInvalidProtocolBufferException();
-        }
-        return result;
-      }
-
+
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter(this);
        int from_bitField0_ = bitField0_;
@@ -1118,7 +1334,7 @@ public final class FilterProtos {
        onBuilt();
        return result;
      }
-
+
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter)other);
@@ -1127,7 +1343,7 @@ public final class FilterProtos {
          return this;
        }
      }
-
+
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.getDefaultInstance()) return this;
        if (other.hasPrefix()) {
@@ -1136,7 +1352,7 @@ public final class FilterProtos {
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
-
+
      public final boolean isInitialized() {
        if (!hasPrefix()) {
@@ -1144,49 +1360,43 @@ public final class FilterProtos {
        }
        return true;
      }
-
+
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
-        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder(
-            this.getUnknownFields());
-        while (true) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              this.setUnknownFields(unknownFields.build());
-              onChanged();
-              return this;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                this.setUnknownFields(unknownFields.build());
-                onChanged();
-                return this;
-              }
-              break;
-            }
-            case 10: {
-              bitField0_ |= 0x00000001;
-              prefix_ = input.readBytes();
-              break;
-            }
+        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
          }
        }
+        return this;
      }
-
      private int bitField0_;
-
+
      // required bytes prefix = 1;
      private com.google.protobuf.ByteString prefix_ = com.google.protobuf.ByteString.EMPTY;
+      /**
+       * required bytes prefix = 1;
+       */
      public boolean hasPrefix() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
+      /**
+       * required bytes prefix = 1;
+       */
      public com.google.protobuf.ByteString getPrefix() {
        return prefix_;
      }
+      /**
+       * required bytes prefix = 1;
+       */
      public Builder setPrefix(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
@@ -1196,112 +1406,245 @@ public final class FilterProtos {
        onChanged();
        return this;
      }
+      /**
+       * required bytes prefix = 1;
+       */
      public Builder clearPrefix() {
        bitField0_ = (bitField0_ & ~0x00000001);
        prefix_ = getDefaultInstance().getPrefix();
        onChanged();
        return this;
      }
-
+
      // @@protoc_insertion_point(builder_scope:ColumnPrefixFilter)
    }
-
+
    static {
      defaultInstance = new ColumnPrefixFilter(true);
      defaultInstance.initFields();
    }
-
+
    // @@protoc_insertion_point(class_scope:ColumnPrefixFilter)
  }
-
+
  public interface ColumnRangeFilterOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
-
+
    // optional bytes minColumn = 1;
+    /**
+     * optional bytes minColumn = 1;
+     */
    boolean hasMinColumn();
+    /**
+     * optional bytes minColumn = 1;
+     */
    com.google.protobuf.ByteString getMinColumn();
-
+
    // optional bool minColumnInclusive = 2;
+    /**
+     * optional bool minColumnInclusive = 2;
+     */
    boolean hasMinColumnInclusive();
+    /**
+     * optional bool minColumnInclusive = 2;
+     */
    boolean getMinColumnInclusive();
-
+
    // optional bytes maxColumn = 3;
+    /**
+     * optional bytes maxColumn = 3;
+     */
    boolean hasMaxColumn();
+    /**
+     * optional bytes maxColumn = 3;
+     */
    com.google.protobuf.ByteString getMaxColumn();
-
+
    // optional bool maxColumnInclusive = 4;
+    /**
+     * optional bool maxColumnInclusive = 4;
+     */
    boolean hasMaxColumnInclusive();
+    /**
+     * optional bool maxColumnInclusive = 4;
+     */
    boolean getMaxColumnInclusive();
  }
+  /**
+   * Protobuf type {@code ColumnRangeFilter}
+   */
  public static final class ColumnRangeFilter extends
      com.google.protobuf.GeneratedMessage
      implements ColumnRangeFilterOrBuilder {
    // Use ColumnRangeFilter.newBuilder() to construct.
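/*
 * A sketch of the hashCode() memoization the regenerated code above adds,
 * assuming protobuf-java 2.5.x: messages are immutable once built, so caching
 * the hash is safe, and repeated hashing (e.g. using a filter as a map key)
 * becomes a cheap field read after the first call.
 */
import java.util.HashMap;
import java.util.Map;
import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter;

public class MemoizedHashDemo {
  public static void main(String[] args) {
    ColumnPrefixFilter f = ColumnPrefixFilter.newBuilder()
        .setPrefix(ByteString.copyFromUtf8("cf:")) // required field
        .build();
    Map<ColumnPrefixFilter, String> cache = new HashMap<ColumnPrefixFilter, String>();
    cache.put(f, "cached-scan-result");
    // The second call returns the memoized value instead of rehashing.
    System.out.println(f.hashCode() == f.hashCode()); // true
    System.out.println(cache.get(f));                 // cached-scan-result
  }
}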
- private ColumnRangeFilter(Builder builder) { + private ColumnRangeFilter(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private ColumnRangeFilter(boolean noInit) {} - + private ColumnRangeFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final ColumnRangeFilter defaultInstance; public static ColumnRangeFilter getDefaultInstance() { return defaultInstance; } - + public ColumnRangeFilter getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ColumnRangeFilter( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + minColumn_ = input.readBytes(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + minColumnInclusive_ = input.readBool(); + break; + } + case 26: { + bitField0_ |= 0x00000004; + maxColumn_ = input.readBytes(); + break; + } + case 32: { + bitField0_ |= 0x00000008; + maxColumnInclusive_ = input.readBool(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnRangeFilter_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnRangeFilter_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnRangeFilter_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public ColumnRangeFilter parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ColumnRangeFilter(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // optional bytes minColumn = 1; public static final int MINCOLUMN_FIELD_NUMBER = 1; private com.google.protobuf.ByteString minColumn_; + /** + * optional bytes 
minColumn = 1; + */ public boolean hasMinColumn() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional bytes minColumn = 1; + */ public com.google.protobuf.ByteString getMinColumn() { return minColumn_; } - + // optional bool minColumnInclusive = 2; public static final int MINCOLUMNINCLUSIVE_FIELD_NUMBER = 2; private boolean minColumnInclusive_; + /** + * optional bool minColumnInclusive = 2; + */ public boolean hasMinColumnInclusive() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional bool minColumnInclusive = 2; + */ public boolean getMinColumnInclusive() { return minColumnInclusive_; } - + // optional bytes maxColumn = 3; public static final int MAXCOLUMN_FIELD_NUMBER = 3; private com.google.protobuf.ByteString maxColumn_; + /** + * optional bytes maxColumn = 3; + */ public boolean hasMaxColumn() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional bytes maxColumn = 3; + */ public com.google.protobuf.ByteString getMaxColumn() { return maxColumn_; } - + // optional bool maxColumnInclusive = 4; public static final int MAXCOLUMNINCLUSIVE_FIELD_NUMBER = 4; private boolean maxColumnInclusive_; + /** + * optional bool maxColumnInclusive = 4; + */ public boolean hasMaxColumnInclusive() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * optional bool maxColumnInclusive = 4; + */ public boolean getMaxColumnInclusive() { return maxColumnInclusive_; } - + private void initFields() { minColumn_ = com.google.protobuf.ByteString.EMPTY; minColumnInclusive_ = false; @@ -1312,11 +1655,11 @@ public final class FilterProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -1334,12 +1677,12 @@ public final class FilterProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -1361,14 +1704,14 @@ public final class FilterProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -1378,7 +1721,7 @@ public final class FilterProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter) obj; - + boolean result = true; result = result && (hasMinColumn() == other.hasMinColumn()); if (hasMinColumn()) { @@ -1404,9 +1747,13 @@ public final class FilterProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasMinColumn()) { @@ -1426,89 +1773,79 @@ public final class FilterProtos { hash = (53 * hash) + hashBoolean(getMaxColumnInclusive()); } hash = (29 * hash) + getUnknownFields().hashCode(); + 
memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder 
newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code ColumnRangeFilter} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilterOrBuilder { @@ -1516,18 +1853,21 @@ public final class FilterProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnRangeFilter_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnRangeFilter_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnRangeFilter_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -1538,7 +1878,7 @@ public final class FilterProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); minColumn_ = com.google.protobuf.ByteString.EMPTY; @@ -1551,20 +1891,20 @@ public final class FilterProtos { bitField0_ = (bitField0_ & ~0x00000008); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnRangeFilter_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter build() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter result = buildPartial(); if (!result.isInitialized()) { @@ -1572,17 +1912,7 @@ public final class FilterProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter buildPartial() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter result = new 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter(this); int from_bitField0_ = bitField0_; @@ -1607,7 +1937,7 @@ public final class FilterProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter)other); @@ -1616,7 +1946,7 @@ public final class FilterProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter other) { if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter.getDefaultInstance()) return this; if (other.hasMinColumn()) { @@ -1634,68 +1964,47 @@ public final class FilterProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - minColumn_ = input.readBytes(); - break; - } - case 16: { - bitField0_ |= 0x00000002; - minColumnInclusive_ = input.readBool(); - break; - } - case 26: { - bitField0_ |= 0x00000004; - maxColumn_ = input.readBytes(); - break; - } - case 32: { - bitField0_ |= 0x00000008; - maxColumnInclusive_ = input.readBool(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // optional bytes minColumn = 1; private com.google.protobuf.ByteString minColumn_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes minColumn = 1; + */ public boolean hasMinColumn() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional bytes minColumn = 1; + */ public com.google.protobuf.ByteString getMinColumn() { return minColumn_; } + /** + * optional bytes minColumn = 1; + */ public Builder setMinColumn(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -1705,42 +2014,66 @@ public final class FilterProtos { onChanged(); return this; } + /** + * optional bytes minColumn = 1; + */ public Builder clearMinColumn() { bitField0_ = (bitField0_ & ~0x00000001); minColumn_ = getDefaultInstance().getMinColumn(); onChanged(); return this; } - + // optional bool minColumnInclusive = 2; private boolean minColumnInclusive_ ; + /** + * optional bool minColumnInclusive = 2; + */ public boolean hasMinColumnInclusive() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * 
optional bool minColumnInclusive = 2; + */ public boolean getMinColumnInclusive() { return minColumnInclusive_; } + /** + * optional bool minColumnInclusive = 2; + */ public Builder setMinColumnInclusive(boolean value) { bitField0_ |= 0x00000002; minColumnInclusive_ = value; onChanged(); return this; } + /** + * optional bool minColumnInclusive = 2; + */ public Builder clearMinColumnInclusive() { bitField0_ = (bitField0_ & ~0x00000002); minColumnInclusive_ = false; onChanged(); return this; } - + // optional bytes maxColumn = 3; private com.google.protobuf.ByteString maxColumn_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes maxColumn = 3; + */ public boolean hasMaxColumn() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional bytes maxColumn = 3; + */ public com.google.protobuf.ByteString getMaxColumn() { return maxColumn_; } + /** + * optional bytes maxColumn = 3; + */ public Builder setMaxColumn(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -1750,109 +2083,240 @@ public final class FilterProtos { onChanged(); return this; } + /** + * optional bytes maxColumn = 3; + */ public Builder clearMaxColumn() { bitField0_ = (bitField0_ & ~0x00000004); maxColumn_ = getDefaultInstance().getMaxColumn(); onChanged(); return this; } - + // optional bool maxColumnInclusive = 4; private boolean maxColumnInclusive_ ; + /** + * optional bool maxColumnInclusive = 4; + */ public boolean hasMaxColumnInclusive() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * optional bool maxColumnInclusive = 4; + */ public boolean getMaxColumnInclusive() { return maxColumnInclusive_; } + /** + * optional bool maxColumnInclusive = 4; + */ public Builder setMaxColumnInclusive(boolean value) { bitField0_ |= 0x00000008; maxColumnInclusive_ = value; onChanged(); return this; } + /** + * optional bool maxColumnInclusive = 4; + */ public Builder clearMaxColumnInclusive() { bitField0_ = (bitField0_ & ~0x00000008); maxColumnInclusive_ = false; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:ColumnRangeFilter) } - + static { defaultInstance = new ColumnRangeFilter(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:ColumnRangeFilter) } - + public interface CompareFilterOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .CompareType compareOp = 1; + /** + * required .CompareType compareOp = 1; + */ boolean hasCompareOp(); + /** + * required .CompareType compareOp = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareOp(); - + // optional .Comparator comparator = 2; + /** + * optional .Comparator comparator = 2; + */ boolean hasComparator(); + /** + * optional .Comparator comparator = 2; + */ org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator(); + /** + * optional .Comparator comparator = 2; + */ org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder(); } + /** + * Protobuf type {@code CompareFilter} + */ public static final class CompareFilter extends com.google.protobuf.GeneratedMessage implements CompareFilterOrBuilder { // Use CompareFilter.newBuilder() to construct. 
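With ColumnRangeFilter complete, the shape of the migration is visible: each message gains a static PARSER constant (typed Parser&lt;ColumnRangeFilter&gt; in the generated source) that parses straight to an immutable message, and every static parseFrom overload now delegates to it instead of going through newBuilder()...buildParsed(). A hedged sketch of using the new constant generically; the helper below is illustrative, not part of the patch:

    import com.google.protobuf.ByteString;
    import com.google.protobuf.InvalidProtocolBufferException;
    import com.google.protobuf.Parser;
    import org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter;

    public class ParserSketch {
      // Works for any 2.5-generated message: parse without an intermediate Builder.
      static <T> T parse(Parser<T> parser, byte[] wire)
          throws InvalidProtocolBufferException {
        return parser.parseFrom(wire);
      }

      public static void main(String[] args) throws Exception {
        ColumnRangeFilter f = ColumnRangeFilter.newBuilder()
            .setMinColumn(ByteString.copyFromUtf8("a"))
            .setMinColumnInclusive(true)
            .build();
        ColumnRangeFilter roundTrip = parse(ColumnRangeFilter.PARSER, f.toByteArray());
        System.out.println(roundTrip.getMinColumn().toStringUtf8());  // prints: a
      }
    }

Note also that PARSER.parseDelimitedFrom returns null at end of stream, preserving the termination behavior of the mergeDelimitedFrom loop it replaces above.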
- private CompareFilter(Builder builder) { + private CompareFilter(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private CompareFilter(boolean noInit) {} - + private CompareFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final CompareFilter defaultInstance; public static CompareFilter getDefaultInstance() { return defaultInstance; } - + public CompareFilter getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private CompareFilter( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType value = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(1, rawValue); + } else { + bitField0_ |= 0x00000001; + compareOp_ = value; + } + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder subBuilder = null; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + subBuilder = comparator_.toBuilder(); + } + comparator_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(comparator_); + comparator_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000002; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_CompareFilter_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_CompareFilter_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_CompareFilter_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public CompareFilter parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CompareFilter(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required .CompareType compareOp = 1; public static final int COMPAREOP_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType compareOp_; + /** + * required .CompareType compareOp = 1; + */ public boolean hasCompareOp() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .CompareType compareOp = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareOp() { return compareOp_; } - + // optional .Comparator comparator = 2; public static final int COMPARATOR_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator comparator_; + /** + * optional .Comparator comparator = 2; + */ public boolean hasComparator() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional .Comparator comparator = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator() { return comparator_; } + /** + * optional .Comparator comparator = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder() { return comparator_; } - + private void initFields() { compareOp_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS; comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); @@ -1861,7 +2325,7 @@ public final class FilterProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasCompareOp()) { memoizedIsInitialized = 0; return false; @@ -1875,7 +2339,7 @@ public final class FilterProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -1887,12 +2351,12 @@ public final class FilterProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -1906,14 +2370,14 @@ public final class FilterProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -1923,7 +2387,7 @@ public final class FilterProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter) obj; - + boolean result = true; result = result && (hasCompareOp() == other.hasCompareOp()); if (hasCompareOp()) { @@ -1939,9 +2403,13 @@ public final class FilterProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + 
getDescriptorForType().hashCode(); if (hasCompareOp()) { @@ -1953,89 +2421,79 @@ public final class FilterProtos { hash = (53 * hash) + getComparator().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder 
newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code CompareFilter} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder { @@ -2043,18 +2501,21 @@ public final class FilterProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_CompareFilter_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_CompareFilter_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_CompareFilter_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -2066,7 +2527,7 @@ public final class FilterProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); compareOp_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS; @@ -2079,20 +2540,20 @@ public final class FilterProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_CompareFilter_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter build() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter result = buildPartial(); if (!result.isInitialized()) { @@ -2100,17 +2561,7 @@ public final class FilterProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter buildPartial() { 
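// buildPartial copies the builder's fields into a fresh immutable message
// under control of bitField0_ but, unlike build(), performs no
// isInitialized() check. This is why the parsing path uses
// parsePartialFrom/buildPartial: a missing required field surfaces as an
// InvalidProtocolBufferException from parseFrom rather than an
// UninitializedMessageException from build().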
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter(this); int from_bitField0_ = bitField0_; @@ -2131,7 +2582,7 @@ public final class FilterProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter)other); @@ -2140,7 +2591,7 @@ public final class FilterProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter other) { if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance()) return this; if (other.hasCompareOp()) { @@ -2152,7 +2603,7 @@ public final class FilterProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasCompareOp()) { @@ -2166,64 +2617,43 @@ public final class FilterProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType value = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(1, rawValue); - } else { - bitField0_ |= 0x00000001; - compareOp_ = value; - } - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.newBuilder(); - if (hasComparator()) { - subBuilder.mergeFrom(getComparator()); - } - input.readMessage(subBuilder, extensionRegistry); - setComparator(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .CompareType compareOp = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType compareOp_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS; + /** + * required .CompareType compareOp = 1; + */ public boolean hasCompareOp() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .CompareType compareOp = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareOp() { return compareOp_; } + /** + * required .CompareType compareOp = 1; + */ public Builder 
setCompareOp(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType value) { if (value == null) { throw new NullPointerException(); @@ -2233,20 +2663,29 @@ public final class FilterProtos { onChanged(); return this; } + /** + * required .CompareType compareOp = 1; + */ public Builder clearCompareOp() { bitField0_ = (bitField0_ & ~0x00000001); compareOp_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS; onChanged(); return this; } - + // optional .Comparator comparator = 2; private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder> comparatorBuilder_; + /** + * optional .Comparator comparator = 2; + */ public boolean hasComparator() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional .Comparator comparator = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator() { if (comparatorBuilder_ == null) { return comparator_; @@ -2254,6 +2693,9 @@ public final class FilterProtos { return comparatorBuilder_.getMessage(); } } + /** + * optional .Comparator comparator = 2; + */ public Builder setComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator value) { if (comparatorBuilder_ == null) { if (value == null) { @@ -2267,6 +2709,9 @@ public final class FilterProtos { bitField0_ |= 0x00000002; return this; } + /** + * optional .Comparator comparator = 2; + */ public Builder setComparator( org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder builderForValue) { if (comparatorBuilder_ == null) { @@ -2278,6 +2723,9 @@ public final class FilterProtos { bitField0_ |= 0x00000002; return this; } + /** + * optional .Comparator comparator = 2; + */ public Builder mergeComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator value) { if (comparatorBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && @@ -2294,6 +2742,9 @@ public final class FilterProtos { bitField0_ |= 0x00000002; return this; } + /** + * optional .Comparator comparator = 2; + */ public Builder clearComparator() { if (comparatorBuilder_ == null) { comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); @@ -2304,11 +2755,17 @@ public final class FilterProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } + /** + * optional .Comparator comparator = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder getComparatorBuilder() { bitField0_ |= 0x00000002; onChanged(); return getComparatorFieldBuilder().getBuilder(); } + /** + * optional .Comparator comparator = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder() { if (comparatorBuilder_ != null) { return comparatorBuilder_.getMessageOrBuilder(); @@ -2316,6 +2773,9 @@ public final class FilterProtos { return comparator_; } } + /** + * optional .Comparator comparator = 2; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, 
org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder> getComparatorFieldBuilder() { @@ -2329,110 +2789,254 @@ public final class FilterProtos { } return comparatorBuilder_; } - + // @@protoc_insertion_point(builder_scope:CompareFilter) } - + static { defaultInstance = new CompareFilter(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:CompareFilter) } - + public interface DependentColumnFilterOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .CompareFilter compareFilter = 1; + /** + * required .CompareFilter compareFilter = 1; + */ boolean hasCompareFilter(); + /** + * required .CompareFilter compareFilter = 1; + */ org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter(); + /** + * required .CompareFilter compareFilter = 1; + */ org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder(); - + // optional bytes columnFamily = 2; + /** + * optional bytes columnFamily = 2; + */ boolean hasColumnFamily(); + /** + * optional bytes columnFamily = 2; + */ com.google.protobuf.ByteString getColumnFamily(); - + // optional bytes columnQualifier = 3; + /** + * optional bytes columnQualifier = 3; + */ boolean hasColumnQualifier(); + /** + * optional bytes columnQualifier = 3; + */ com.google.protobuf.ByteString getColumnQualifier(); - + // optional bool dropDependentColumn = 4; + /** + * optional bool dropDependentColumn = 4; + */ boolean hasDropDependentColumn(); + /** + * optional bool dropDependentColumn = 4; + */ boolean getDropDependentColumn(); } + /** + * Protobuf type {@code DependentColumnFilter} + */ public static final class DependentColumnFilter extends com.google.protobuf.GeneratedMessage implements DependentColumnFilterOrBuilder { // Use DependentColumnFilter.newBuilder() to construct. 
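The CompareFilter parsing constructor above encodes the proto2 enum rule: a wire value that CompareType.valueOf does not recognize is routed into unknownFields via mergeVarintField(1, rawValue) rather than silently dropped, which is what keeps mixed-version clusters safe when newer peers add enum values. A hedged sketch with hand-built wire bytes; parsePartialFrom is used because compareOp is a required field that the unrecognized value never sets:

    import org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter;

    public class UnknownEnumSketch {
      public static void main(String[] args) throws Exception {
        // field #1 as a varint (tag 0x08) carrying 99, which maps to no CompareType
        byte[] wire = { 0x08, 99 };
        CompareFilter f = CompareFilter.PARSER.parsePartialFrom(wire);
        System.out.println(f.hasCompareOp());                  // false: unrecognized
        System.out.println(f.getUnknownFields().hasField(1));  // true: value preserved
      }
    }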
- private DependentColumnFilter(Builder builder) { + private DependentColumnFilter(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private DependentColumnFilter(boolean noInit) {} - + private DependentColumnFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final DependentColumnFilter defaultInstance; public static DependentColumnFilter getDefaultInstance() { return defaultInstance; } - + public DependentColumnFilter getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private DependentColumnFilter( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = compareFilter_.toBuilder(); + } + compareFilter_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(compareFilter_); + compareFilter_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + case 18: { + bitField0_ |= 0x00000002; + columnFamily_ = input.readBytes(); + break; + } + case 26: { + bitField0_ |= 0x00000004; + columnQualifier_ = input.readBytes(); + break; + } + case 32: { + bitField0_ |= 0x00000008; + dropDependentColumn_ = input.readBool(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_DependentColumnFilter_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_DependentColumnFilter_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_DependentColumnFilter_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public DependentColumnFilter parsePartialFrom( + com.google.protobuf.CodedInputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new DependentColumnFilter(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required .CompareFilter compareFilter = 1; public static final int COMPAREFILTER_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_; + /** + * required .CompareFilter compareFilter = 1; + */ public boolean hasCompareFilter() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .CompareFilter compareFilter = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() { return compareFilter_; } + /** + * required .CompareFilter compareFilter = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() { return compareFilter_; } - + // optional bytes columnFamily = 2; public static final int COLUMNFAMILY_FIELD_NUMBER = 2; private com.google.protobuf.ByteString columnFamily_; + /** + * optional bytes columnFamily = 2; + */ public boolean hasColumnFamily() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional bytes columnFamily = 2; + */ public com.google.protobuf.ByteString getColumnFamily() { return columnFamily_; } - + // optional bytes columnQualifier = 3; public static final int COLUMNQUALIFIER_FIELD_NUMBER = 3; private com.google.protobuf.ByteString columnQualifier_; + /** + * optional bytes columnQualifier = 3; + */ public boolean hasColumnQualifier() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional bytes columnQualifier = 3; + */ public com.google.protobuf.ByteString getColumnQualifier() { return columnQualifier_; } - + // optional bool dropDependentColumn = 4; public static final int DROPDEPENDENTCOLUMN_FIELD_NUMBER = 4; private boolean dropDependentColumn_; + /** + * optional bool dropDependentColumn = 4; + */ public boolean hasDropDependentColumn() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * optional bool dropDependentColumn = 4; + */ public boolean getDropDependentColumn() { return dropDependentColumn_; } - + private void initFields() { compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); columnFamily_ = com.google.protobuf.ByteString.EMPTY; @@ -2443,7 +3047,7 @@ public final class FilterProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasCompareFilter()) { memoizedIsInitialized = 0; return false; @@ -2455,7 +3059,7 @@ public final class FilterProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -2473,12 +3077,12 @@ public final class FilterProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -2500,14 +3104,14 @@ public final class FilterProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object 
writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -2517,7 +3121,7 @@ public final class FilterProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter) obj; - + boolean result = true; result = result && (hasCompareFilter() == other.hasCompareFilter()); if (hasCompareFilter()) { @@ -2543,9 +3147,13 @@ public final class FilterProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasCompareFilter()) { @@ -2565,89 +3173,79 @@ public final class FilterProtos { hash = (53 * hash) + hashBoolean(getDropDependentColumn()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseDelimitedFrom( java.io.InputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code DependentColumnFilter} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilterOrBuilder { @@ -2655,18 +3253,21 @@ public final class FilterProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_DependentColumnFilter_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_DependentColumnFilter_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_DependentColumnFilter_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -2678,7 +3279,7 @@ public final class FilterProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (compareFilterBuilder_ == null) { @@ -2695,20 +3296,20 @@ public final class FilterProtos { bitField0_ = (bitField0_ & ~0x00000008); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_DependentColumnFilter_descriptor; } - + public 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter build() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter result = buildPartial(); if (!result.isInitialized()) { @@ -2716,17 +3317,7 @@ public final class FilterProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter buildPartial() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter(this); int from_bitField0_ = bitField0_; @@ -2755,7 +3346,7 @@ public final class FilterProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter)other); @@ -2764,7 +3355,7 @@ public final class FilterProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter other) { if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter.getDefaultInstance()) return this; if (other.hasCompareFilter()) { @@ -2782,7 +3373,7 @@ public final class FilterProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasCompareFilter()) { @@ -2794,67 +3385,39 @@ public final class FilterProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.newBuilder(); - if (hasCompareFilter()) { - subBuilder.mergeFrom(getCompareFilter()); - } - input.readMessage(subBuilder, extensionRegistry); - setCompareFilter(subBuilder.buildPartial()); - break; - } - case 18: { - bitField0_ |= 0x00000002; - columnFamily_ = input.readBytes(); - break; - } - case 26: { - bitField0_ |= 0x00000004; - columnQualifier_ = input.readBytes(); - break; - } - case 32: { - bitField0_ |= 0x00000008; - dropDependentColumn_ = input.readBool(); - break; - } + 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .CompareFilter compareFilter = 1; private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> compareFilterBuilder_; + /** + * required .CompareFilter compareFilter = 1; + */ public boolean hasCompareFilter() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .CompareFilter compareFilter = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() { if (compareFilterBuilder_ == null) { return compareFilter_; @@ -2862,6 +3425,9 @@ public final class FilterProtos { return compareFilterBuilder_.getMessage(); } } + /** + * required .CompareFilter compareFilter = 1; + */ public Builder setCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) { if (compareFilterBuilder_ == null) { if (value == null) { @@ -2875,6 +3441,9 @@ public final class FilterProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .CompareFilter compareFilter = 1; + */ public Builder setCompareFilter( org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder builderForValue) { if (compareFilterBuilder_ == null) { @@ -2886,6 +3455,9 @@ public final class FilterProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .CompareFilter compareFilter = 1; + */ public Builder mergeCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) { if (compareFilterBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -2902,6 +3474,9 @@ public final class FilterProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .CompareFilter compareFilter = 1; + */ public Builder clearCompareFilter() { if (compareFilterBuilder_ == null) { compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); @@ -2912,11 +3487,17 @@ public final class FilterProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * required .CompareFilter compareFilter = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder getCompareFilterBuilder() { bitField0_ |= 0x00000001; onChanged(); return getCompareFilterFieldBuilder().getBuilder(); } + /** + * required .CompareFilter compareFilter = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() { if (compareFilterBuilder_ != null) { return compareFilterBuilder_.getMessageOrBuilder(); @@ -2924,6 +3505,9 @@ public final class FilterProtos { return compareFilter_; } } + /** + * required .CompareFilter compareFilter = 1; + */ private 
com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> getCompareFilterFieldBuilder() { @@ -2937,15 +3521,24 @@ public final class FilterProtos { } return compareFilterBuilder_; } - + // optional bytes columnFamily = 2; private com.google.protobuf.ByteString columnFamily_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes columnFamily = 2; + */ public boolean hasColumnFamily() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional bytes columnFamily = 2; + */ public com.google.protobuf.ByteString getColumnFamily() { return columnFamily_; } + /** + * optional bytes columnFamily = 2; + */ public Builder setColumnFamily(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -2955,21 +3548,33 @@ public final class FilterProtos { onChanged(); return this; } + /** + * optional bytes columnFamily = 2; + */ public Builder clearColumnFamily() { bitField0_ = (bitField0_ & ~0x00000002); columnFamily_ = getDefaultInstance().getColumnFamily(); onChanged(); return this; } - + // optional bytes columnQualifier = 3; private com.google.protobuf.ByteString columnQualifier_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes columnQualifier = 3; + */ public boolean hasColumnQualifier() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional bytes columnQualifier = 3; + */ public com.google.protobuf.ByteString getColumnQualifier() { return columnQualifier_; } + /** + * optional bytes columnQualifier = 3; + */ public Builder setColumnQualifier(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -2979,95 +3584,203 @@ public final class FilterProtos { onChanged(); return this; } + /** + * optional bytes columnQualifier = 3; + */ public Builder clearColumnQualifier() { bitField0_ = (bitField0_ & ~0x00000004); columnQualifier_ = getDefaultInstance().getColumnQualifier(); onChanged(); return this; } - + // optional bool dropDependentColumn = 4; private boolean dropDependentColumn_ ; + /** + * optional bool dropDependentColumn = 4; + */ public boolean hasDropDependentColumn() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * optional bool dropDependentColumn = 4; + */ public boolean getDropDependentColumn() { return dropDependentColumn_; } + /** + * optional bool dropDependentColumn = 4; + */ public Builder setDropDependentColumn(boolean value) { bitField0_ |= 0x00000008; dropDependentColumn_ = value; onChanged(); return this; } + /** + * optional bool dropDependentColumn = 4; + */ public Builder clearDropDependentColumn() { bitField0_ = (bitField0_ & ~0x00000008); dropDependentColumn_ = false; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:DependentColumnFilter) } - + static { defaultInstance = new DependentColumnFilter(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:DependentColumnFilter) } - + public interface FamilyFilterOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .CompareFilter compareFilter = 1; + /** + * required .CompareFilter compareFilter = 1; + */ boolean hasCompareFilter(); + /** + * required .CompareFilter compareFilter = 1; + */ org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter(); + /** + * required 
.CompareFilter compareFilter = 1; + */ org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder(); } + /** + * Protobuf type {@code FamilyFilter} + */ public static final class FamilyFilter extends com.google.protobuf.GeneratedMessage implements FamilyFilterOrBuilder { // Use FamilyFilter.newBuilder() to construct. - private FamilyFilter(Builder builder) { + private FamilyFilter(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private FamilyFilter(boolean noInit) {} - + private FamilyFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final FamilyFilter defaultInstance; public static FamilyFilter getDefaultInstance() { return defaultInstance; } - + public FamilyFilter getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private FamilyFilter( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = compareFilter_.toBuilder(); + } + compareFilter_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(compareFilter_); + compareFilter_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FamilyFilter_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FamilyFilter_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FamilyFilter_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public FamilyFilter parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new FamilyFilter(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required .CompareFilter compareFilter = 1; public static final int COMPAREFILTER_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_; + /** + * required .CompareFilter compareFilter = 1; + */ public boolean hasCompareFilter() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .CompareFilter compareFilter = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() { return compareFilter_; } + /** + * required .CompareFilter compareFilter = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() { return compareFilter_; } - + private void initFields() { compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); } @@ -3075,7 +3788,7 @@ public final class FilterProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasCompareFilter()) { memoizedIsInitialized = 0; return false; @@ -3087,7 +3800,7 @@ public final class FilterProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -3096,12 +3809,12 @@ public final class FilterProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -3111,14 +3824,14 @@ public final class FilterProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -3128,7 +3841,7 @@ public final class FilterProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter) obj; - + boolean result = true; result = result && (hasCompareFilter() == other.hasCompareFilter()); if (hasCompareFilter()) { @@ -3139,9 +3852,13 @@ public final class FilterProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasCompareFilter()) { @@ -3149,89 +3866,79 @@ public final class FilterProtos { hash = (53 * hash) + getCompareFilter().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return 
PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code 
FamilyFilter} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilterOrBuilder { @@ -3239,18 +3946,21 @@ public final class FilterProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FamilyFilter_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FamilyFilter_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FamilyFilter_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -3262,7 +3972,7 @@ public final class FilterProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (compareFilterBuilder_ == null) { @@ -3273,20 +3983,20 @@ public final class FilterProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FamilyFilter_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter build() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter result = buildPartial(); if (!result.isInitialized()) { @@ -3294,17 +4004,7 @@ public final class FilterProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter buildPartial() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter(this); int from_bitField0_ = bitField0_; @@ -3321,7 +4021,7 @@ public final class FilterProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter)other); @@ -3330,7 +4030,7 @@ public final class FilterProtos { return this; } } - + public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter other) { if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter.getDefaultInstance()) return this; if (other.hasCompareFilter()) { @@ -3339,7 +4039,7 @@ public final class FilterProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasCompareFilter()) { @@ -3351,52 +4051,39 @@ public final class FilterProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.newBuilder(); - if (hasCompareFilter()) { - subBuilder.mergeFrom(getCompareFilter()); - } - input.readMessage(subBuilder, extensionRegistry); - setCompareFilter(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .CompareFilter compareFilter = 1; private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> compareFilterBuilder_; + /** + * required .CompareFilter compareFilter = 1; + */ public boolean hasCompareFilter() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .CompareFilter compareFilter = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() { if (compareFilterBuilder_ == null) { return compareFilter_; @@ -3404,6 +4091,9 @@ public final class FilterProtos { return compareFilterBuilder_.getMessage(); } } + /** + * required .CompareFilter compareFilter = 1; + */ public Builder setCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) { if (compareFilterBuilder_ == null) { if (value == null) { @@ -3417,6 +4107,9 @@ public final class FilterProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .CompareFilter compareFilter = 1; + */ public Builder setCompareFilter( org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder builderForValue) { if (compareFilterBuilder_ == null) 
{ @@ -3428,6 +4121,9 @@ public final class FilterProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .CompareFilter compareFilter = 1; + */ public Builder mergeCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) { if (compareFilterBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -3444,6 +4140,9 @@ public final class FilterProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .CompareFilter compareFilter = 1; + */ public Builder clearCompareFilter() { if (compareFilterBuilder_ == null) { compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); @@ -3454,11 +4153,17 @@ public final class FilterProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * required .CompareFilter compareFilter = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder getCompareFilterBuilder() { bitField0_ |= 0x00000001; onChanged(); return getCompareFilterFieldBuilder().getBuilder(); } + /** + * required .CompareFilter compareFilter = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() { if (compareFilterBuilder_ != null) { return compareFilterBuilder_.getMessageOrBuilder(); @@ -3466,6 +4171,9 @@ public final class FilterProtos { return compareFilter_; } } + /** + * required .CompareFilter compareFilter = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> getCompareFilterFieldBuilder() { @@ -3479,75 +4187,195 @@ public final class FilterProtos { } return compareFilterBuilder_; } - + // @@protoc_insertion_point(builder_scope:FamilyFilter) } - + static { defaultInstance = new FamilyFilter(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:FamilyFilter) } - + public interface FilterListOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .FilterList.Operator operator = 1; + /** + * required .FilterList.Operator operator = 1; + */ boolean hasOperator(); + /** + * required .FilterList.Operator operator = 1; + */ org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator getOperator(); - + // repeated .Filter filters = 2; + /** + * repeated .Filter filters = 2; + */ java.util.List getFiltersList(); + /** + * repeated .Filter filters = 2; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilters(int index); + /** + * repeated .Filter filters = 2; + */ int getFiltersCount(); + /** + * repeated .Filter filters = 2; + */ java.util.List getFiltersOrBuilderList(); + /** + * repeated .Filter filters = 2; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFiltersOrBuilder( int index); } + /** + * Protobuf type {@code FilterList} + */ public static final class FilterList extends com.google.protobuf.GeneratedMessage implements FilterListOrBuilder { // Use FilterList.newBuilder() to construct. 
- private FilterList(Builder builder) { + private FilterList(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private FilterList(boolean noInit) {} - + private FilterList(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final FilterList defaultInstance; public static FilterList getDefaultInstance() { return defaultInstance; } - + public FilterList getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private FilterList( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator value = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(1, rawValue); + } else { + bitField0_ |= 0x00000001; + operator_ = value; + } + break; + } + case 18: { + if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + filters_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000002; + } + filters_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.PARSER, extensionRegistry)); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + filters_ = java.util.Collections.unmodifiableList(filters_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterList_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterList_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterList_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public FilterList parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new FilterList(input, 
extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + + /** + * Protobuf enum {@code FilterList.Operator} + */ public enum Operator implements com.google.protobuf.ProtocolMessageEnum { + /** + * MUST_PASS_ALL = 1; + */ MUST_PASS_ALL(0, 1), + /** + * MUST_PASS_ONE = 2; + */ MUST_PASS_ONE(1, 2), ; - + + /** + * MUST_PASS_ALL = 1; + */ public static final int MUST_PASS_ALL_VALUE = 1; + /** + * MUST_PASS_ONE = 2; + */ public static final int MUST_PASS_ONE_VALUE = 2; - - + + public final int getNumber() { return value; } - + public static Operator valueOf(int value) { switch (value) { case 1: return MUST_PASS_ALL; @@ -3555,7 +4383,7 @@ public final class FilterProtos { default: return null; } } - + public static com.google.protobuf.Internal.EnumLiteMap internalGetValueMap() { return internalValueMap; @@ -3567,7 +4395,7 @@ public final class FilterProtos { return Operator.valueOf(number); } }; - + public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(index); @@ -3580,11 +4408,9 @@ public final class FilterProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.getDescriptor().getEnumTypes().get(0); } - - private static final Operator[] VALUES = { - MUST_PASS_ALL, MUST_PASS_ONE, - }; - + + private static final Operator[] VALUES = values(); + public static Operator valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { @@ -3593,50 +4419,71 @@ public final class FilterProtos { } return VALUES[desc.getIndex()]; } - + private final int index; private final int value; - + private Operator(int index, int value) { this.index = index; this.value = value; } - + // @@protoc_insertion_point(enum_scope:FilterList.Operator) } - + private int bitField0_; // required .FilterList.Operator operator = 1; public static final int OPERATOR_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator operator_; + /** + * required .FilterList.Operator operator = 1; + */ public boolean hasOperator() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .FilterList.Operator operator = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator getOperator() { return operator_; } - + // repeated .Filter filters = 2; public static final int FILTERS_FIELD_NUMBER = 2; private java.util.List filters_; + /** + * repeated .Filter filters = 2; + */ public java.util.List getFiltersList() { return filters_; } + /** + * repeated .Filter filters = 2; + */ public java.util.List getFiltersOrBuilderList() { return filters_; } + /** + * repeated .Filter filters = 2; + */ public int getFiltersCount() { return filters_.size(); } + /** + * repeated .Filter filters = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilters(int index) { return filters_.get(index); } + /** + * repeated .Filter filters = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFiltersOrBuilder( int index) { return filters_.get(index); } - + private void initFields() { operator_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator.MUST_PASS_ALL; filters_ = java.util.Collections.emptyList(); @@ -3645,7 +4492,7 @@ public final class FilterProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if 
(isInitialized != -1) return isInitialized == 1; - + if (!hasOperator()) { memoizedIsInitialized = 0; return false; @@ -3659,7 +4506,7 @@ public final class FilterProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -3671,12 +4518,12 @@ public final class FilterProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -3690,14 +4537,14 @@ public final class FilterProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -3707,7 +4554,7 @@ public final class FilterProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList) obj; - + boolean result = true; result = result && (hasOperator() == other.hasOperator()); if (hasOperator()) { @@ -3720,9 +4567,13 @@ public final class FilterProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasOperator()) { @@ -3734,89 +4585,79 @@ public final class FilterProtos { hash = (53 * hash) + getFiltersList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code FilterList} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterListOrBuilder { @@ -3824,18 +4665,21 @@ public final class FilterProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterList_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterList_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterList_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -3847,7 +4691,7 @@ public final class 
FilterProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); operator_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator.MUST_PASS_ALL; @@ -3860,20 +4704,20 @@ public final class FilterProtos { } return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterList_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList build() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList result = buildPartial(); if (!result.isInitialized()) { @@ -3881,17 +4725,7 @@ public final class FilterProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList buildPartial() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList(this); int from_bitField0_ = bitField0_; @@ -3913,7 +4747,7 @@ public final class FilterProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList)other); @@ -3922,7 +4756,7 @@ public final class FilterProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList other) { if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.getDefaultInstance()) return this; if (other.hasOperator()) { @@ -3957,7 +4791,7 @@ public final class FilterProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasOperator()) { @@ -3971,61 +4805,43 @@ public final class FilterProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator value = 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(1, rawValue); - } else { - bitField0_ |= 0x00000001; - operator_ = value; - } - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addFilters(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .FilterList.Operator operator = 1; private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator operator_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator.MUST_PASS_ALL; + /** + * required .FilterList.Operator operator = 1; + */ public boolean hasOperator() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .FilterList.Operator operator = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator getOperator() { return operator_; } + /** + * required .FilterList.Operator operator = 1; + */ public Builder setOperator(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator value) { if (value == null) { throw new NullPointerException(); @@ -4035,13 +4851,16 @@ public final class FilterProtos { onChanged(); return this; } + /** + * required .FilterList.Operator operator = 1; + */ public Builder clearOperator() { bitField0_ = (bitField0_ & ~0x00000001); operator_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator.MUST_PASS_ALL; onChanged(); return this; } - + // repeated .Filter filters = 2; private java.util.List filters_ = java.util.Collections.emptyList(); @@ -4051,10 +4870,13 @@ public final class FilterProtos { bitField0_ |= 0x00000002; } } - + private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder> filtersBuilder_; - + + /** + * repeated .Filter filters = 2; + */ public java.util.List getFiltersList() { if (filtersBuilder_ == null) { return java.util.Collections.unmodifiableList(filters_); @@ -4062,6 +4884,9 @@ public final class FilterProtos { return filtersBuilder_.getMessageList(); } } + /** + * repeated .Filter filters = 2; + */ public int getFiltersCount() { if (filtersBuilder_ == null) { return filters_.size(); @@ -4069,6 +4894,9 @@ public final class FilterProtos { return filtersBuilder_.getCount(); } } + /** + * repeated .Filter filters = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilters(int index) { if (filtersBuilder_ == null) { return filters_.get(index); @@ -4076,6 +4904,9 @@ public final class FilterProtos { return filtersBuilder_.getMessage(index); } } + /** + * repeated .Filter filters = 2; + */ public Builder setFilters( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter 
value) { if (filtersBuilder_ == null) { @@ -4090,6 +4921,9 @@ public final class FilterProtos { } return this; } + /** + * repeated .Filter filters = 2; + */ public Builder setFilters( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder builderForValue) { if (filtersBuilder_ == null) { @@ -4101,6 +4935,9 @@ public final class FilterProtos { } return this; } + /** + * repeated .Filter filters = 2; + */ public Builder addFilters(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter value) { if (filtersBuilder_ == null) { if (value == null) { @@ -4114,6 +4951,9 @@ public final class FilterProtos { } return this; } + /** + * repeated .Filter filters = 2; + */ public Builder addFilters( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter value) { if (filtersBuilder_ == null) { @@ -4128,6 +4968,9 @@ public final class FilterProtos { } return this; } + /** + * repeated .Filter filters = 2; + */ public Builder addFilters( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder builderForValue) { if (filtersBuilder_ == null) { @@ -4139,6 +4982,9 @@ public final class FilterProtos { } return this; } + /** + * repeated .Filter filters = 2; + */ public Builder addFilters( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder builderForValue) { if (filtersBuilder_ == null) { @@ -4150,6 +4996,9 @@ public final class FilterProtos { } return this; } + /** + * repeated .Filter filters = 2; + */ public Builder addAllFilters( java.lang.Iterable values) { if (filtersBuilder_ == null) { @@ -4161,6 +5010,9 @@ public final class FilterProtos { } return this; } + /** + * repeated .Filter filters = 2; + */ public Builder clearFilters() { if (filtersBuilder_ == null) { filters_ = java.util.Collections.emptyList(); @@ -4171,6 +5023,9 @@ public final class FilterProtos { } return this; } + /** + * repeated .Filter filters = 2; + */ public Builder removeFilters(int index) { if (filtersBuilder_ == null) { ensureFiltersIsMutable(); @@ -4181,10 +5036,16 @@ public final class FilterProtos { } return this; } + /** + * repeated .Filter filters = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder getFiltersBuilder( int index) { return getFiltersFieldBuilder().getBuilder(index); } + /** + * repeated .Filter filters = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFiltersOrBuilder( int index) { if (filtersBuilder_ == null) { @@ -4192,6 +5053,9 @@ public final class FilterProtos { return filtersBuilder_.getMessageOrBuilder(index); } } + /** + * repeated .Filter filters = 2; + */ public java.util.List getFiltersOrBuilderList() { if (filtersBuilder_ != null) { @@ -4200,15 +5064,24 @@ public final class FilterProtos { return java.util.Collections.unmodifiableList(filters_); } } + /** + * repeated .Filter filters = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder addFiltersBuilder() { return getFiltersFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance()); } + /** + * repeated .Filter filters = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder addFiltersBuilder( int index) { return getFiltersFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance()); } + /** + * repeated .Filter filters = 2; + */ public java.util.List getFiltersBuilderList() { return 
getFiltersFieldBuilder().getBuilderList(); @@ -4227,68 +5100,161 @@ public final class FilterProtos { } return filtersBuilder_; } - + // @@protoc_insertion_point(builder_scope:FilterList) } - + static { defaultInstance = new FilterList(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:FilterList) } - + public interface FilterWrapperOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .Filter filter = 1; + /** + * required .Filter filter = 1; + */ boolean hasFilter(); + /** + * required .Filter filter = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilter(); + /** + * required .Filter filter = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFilterOrBuilder(); } + /** + * Protobuf type {@code FilterWrapper} + */ public static final class FilterWrapper extends com.google.protobuf.GeneratedMessage implements FilterWrapperOrBuilder { // Use FilterWrapper.newBuilder() to construct. - private FilterWrapper(Builder builder) { + private FilterWrapper(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private FilterWrapper(boolean noInit) {} - + private FilterWrapper(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final FilterWrapper defaultInstance; public static FilterWrapper getDefaultInstance() { return defaultInstance; } - + public FilterWrapper getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private FilterWrapper( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = filter_.toBuilder(); + } + filter_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(filter_); + filter_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterWrapper_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterWrapper_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterWrapper_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public FilterWrapper parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new FilterWrapper(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required .Filter filter = 1; public static final int FILTER_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter filter_; + /** + * required .Filter filter = 1; + */ public boolean hasFilter() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .Filter filter = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilter() { return filter_; } + /** + * required .Filter filter = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFilterOrBuilder() { return filter_; } - + private void initFields() { filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); } @@ -4296,7 +5262,7 @@ public final class FilterProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasFilter()) { memoizedIsInitialized = 0; return false; @@ -4308,7 +5274,7 @@ public final class FilterProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -4317,12 +5283,12 @@ public final class FilterProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -4332,14 +5298,14 @@ public final class FilterProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -4349,7 +5315,7 @@ public final class FilterProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper) obj; - + boolean result = true; result = result && (hasFilter() == other.hasFilter()); if (hasFilter()) { @@ -4360,9 +5326,13 @@ public final class FilterProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if 
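Every message now publishes a static PARSER — an AbstractParser subclass that simply invokes the parsing constructor — and returns it from getParserForType(). Deserialization no longer allocates an intermediate Builder. A sketch, assuming protobuf 2.5.0 on the classpath:

    import com.google.protobuf.InvalidProtocolBufferException;
    import org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper;

    public class ParserSketch {
      static FilterWrapper decode(byte[] wire) throws InvalidProtocolBufferException {
        // One shared, stateless parser per message type; thread-safe,
        // no Builder allocated on the parse path.
        return FilterWrapper.PARSER.parseFrom(wire);
      }
    }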
(hasFilter()) { @@ -4370,89 +5340,79 @@ public final class FilterProtos { hash = (53 * hash) + getFilter().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } 
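All ten static parseFrom/parseDelimitedFrom overloads collapse into one-line delegations to PARSER. One contract worth calling out: the old hand-rolled parseDelimitedFrom returned null when mergeDelimitedFrom hit a clean end of stream, and protobuf 2.5.0's AbstractParser preserves that (it returns null when the first length byte reads EOF), so loops over length-prefixed streams keep working unchanged. A sketch of such a loop:

    import java.io.IOException;
    import java.io.InputStream;
    import org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper;

    public class DelimitedLoop {
      // Counts length-prefixed FilterWrapper messages until EOF; null is
      // still the end-of-stream sentinel after this patch.
      static int count(InputStream in) throws IOException {
        int n = 0;
        while (FilterWrapper.parseDelimitedFrom(in) != null) {
          n++;
        }
        return n;
      }
    }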
public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code FilterWrapper} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapperOrBuilder { @@ -4460,18 +5420,21 @@ public final class FilterProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterWrapper_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterWrapper_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterWrapper_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -4483,7 +5446,7 @@ public final class FilterProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (filterBuilder_ == null) { @@ -4494,20 +5457,20 @@ public final class FilterProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterWrapper_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper build() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper result = buildPartial(); if (!result.isInitialized()) { @@ -4515,17 +5478,7 @@ public final class FilterProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper buildPartial() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper result = new 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper(this); int from_bitField0_ = bitField0_; @@ -4542,7 +5495,7 @@ public final class FilterProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper)other); @@ -4551,7 +5504,7 @@ public final class FilterProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper other) { if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper.getDefaultInstance()) return this; if (other.hasFilter()) { @@ -4560,7 +5513,7 @@ public final class FilterProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasFilter()) { @@ -4572,52 +5525,39 @@ public final class FilterProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.newBuilder(); - if (hasFilter()) { - subBuilder.mergeFrom(getFilter()); - } - input.readMessage(subBuilder, extensionRegistry); - setFilter(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .Filter filter = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder> filterBuilder_; + /** + * required .Filter filter = 1; + */ public boolean hasFilter() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .Filter filter = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilter() { if (filterBuilder_ == null) { return filter_; @@ -4625,6 +5565,9 @@ public final class FilterProtos { return filterBuilder_.getMessage(); } } + /** + * required .Filter filter = 1; + */ public Builder setFilter(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter value) { if (filterBuilder_ == null) { if (value == null) { 
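Builder.mergeFrom(CodedInputStream, ...) is now a thin wrapper: it calls PARSER.parsePartialFrom and, in the finally block, merges whatever was decoded back into the builder — on failure it pulls the partial message out of InvalidProtocolBufferException.getUnfinishedMessage(), which the parsing constructor populated via setUnfinishedMessage. One observable consequence, sketched below: after a failed mergeFrom the builder already holds the fields that did decode.

    import com.google.protobuf.InvalidProtocolBufferException;
    import org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper;

    public class PartialMerge {
      static FilterWrapper.Builder tolerant(byte[] wire) {
        FilterWrapper.Builder builder = FilterWrapper.newBuilder();
        try {
          builder.mergeFrom(wire);  // routes through the hunk above
        } catch (InvalidProtocolBufferException e) {
          // Illustration only: the finally block in mergeFrom has already
          // merged e.getUnfinishedMessage() into this builder.
        }
        return builder;
      }
    }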
@@ -4638,6 +5581,9 @@ public final class FilterProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .Filter filter = 1; + */ public Builder setFilter( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder builderForValue) { if (filterBuilder_ == null) { @@ -4649,6 +5595,9 @@ public final class FilterProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .Filter filter = 1; + */ public Builder mergeFilter(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter value) { if (filterBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -4665,6 +5614,9 @@ public final class FilterProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .Filter filter = 1; + */ public Builder clearFilter() { if (filterBuilder_ == null) { filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); @@ -4675,11 +5627,17 @@ public final class FilterProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * required .Filter filter = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder getFilterBuilder() { bitField0_ |= 0x00000001; onChanged(); return getFilterFieldBuilder().getBuilder(); } + /** + * required .Filter filter = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFilterOrBuilder() { if (filterBuilder_ != null) { return filterBuilder_.getMessageOrBuilder(); @@ -4687,6 +5645,9 @@ public final class FilterProtos { return filter_; } } + /** + * required .Filter filter = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder> getFilterFieldBuilder() { @@ -4700,84 +5661,145 @@ public final class FilterProtos { } return filterBuilder_; } - + // @@protoc_insertion_point(builder_scope:FilterWrapper) } - + static { defaultInstance = new FilterWrapper(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:FilterWrapper) } - + public interface FirstKeyOnlyFilterOrBuilder extends com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code FirstKeyOnlyFilter} + */ public static final class FirstKeyOnlyFilter extends com.google.protobuf.GeneratedMessage implements FirstKeyOnlyFilterOrBuilder { // Use FirstKeyOnlyFilter.newBuilder() to construct. 
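For the required singular `filter` field, the builder pairs setFilter (replace outright) with mergeFilter (overlay: fields set in the argument win, fields left unset in it survive from the current value, per standard protobuf merge semantics). A sketch of the difference, again assuming Filter's required string name:

    import org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper;
    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter;

    public class SetVsMerge {
      public static void main(String[] args) {
        Filter named = Filter.newBuilder().setName("KeyOnlyFilter").build();
        Filter emptyOverlay = Filter.newBuilder().buildPartial(); // nothing set

        FilterWrapper w = FilterWrapper.newBuilder()
            .setFilter(named)
            .mergeFilter(emptyOverlay)  // overlay sets no fields...
            .build();

        System.out.println(w.getFilter().getName()); // ..."KeyOnlyFilter" survives
      }
    }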
- private FirstKeyOnlyFilter(Builder builder) { + private FirstKeyOnlyFilter(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private FirstKeyOnlyFilter(boolean noInit) {} - + private FirstKeyOnlyFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final FirstKeyOnlyFilter defaultInstance; public static FirstKeyOnlyFilter getDefaultInstance() { return defaultInstance; } - + public FirstKeyOnlyFilter getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private FirstKeyOnlyFilter( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FirstKeyOnlyFilter_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FirstKeyOnlyFilter_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FirstKeyOnlyFilter_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.Builder.class); } - - private void initFields() { + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public FirstKeyOnlyFilter parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new FirstKeyOnlyFilter(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) 
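FirstKeyOnlyFilter has no fields at all, which makes the new shape easy to see: the parsing constructor's loop does nothing but funnel unrecognized tags into the message-level unknownFields, now stored on the message rather than recomputed. That is what lets data written by a newer schema survive a round trip — sketched here with a wire payload hand-built through UnknownFieldSet's builder API:

    import com.google.protobuf.InvalidProtocolBufferException;
    import com.google.protobuf.UnknownFieldSet;
    import org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter;

    public class UnknownFieldRoundTrip {
      public static void main(String[] args) throws InvalidProtocolBufferException {
        // Wire bytes carrying a field this message does not define
        // (field 5, varint 42), standing in for a newer schema's data.
        byte[] wire = UnknownFieldSet.newBuilder()
            .addField(5, UnknownFieldSet.Field.newBuilder().addVarint(42).build())
            .build()
            .toByteArray();

        FirstKeyOnlyFilter parsed = FirstKeyOnlyFilter.PARSER.parseFrom(wire);

        System.out.println(parsed.getUnknownFields().hasField(5));      // true
        System.out.println(parsed.toByteArray().length == wire.length); // true
      }
    }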
return size; - + size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -4787,101 +5809,95 @@ public final class FilterProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter) obj; - + boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - 
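hashCode is now memoized alongside the existing memoizedSerializedSize and memoizedIsInitialized caches. The pattern is the usual benign-race cache on an immutable object: 0 doubles as "not computed", and the rare 0-valued hash (or a race between threads) simply recomputes the same value. A generic illustration of the same idiom — not HBase code:

    public final class CachedHash {
      private final String payload;
      private int memoizedHashCode;  // 0 doubles as "not computed yet"

      public CachedHash(String payload) {
        this.payload = payload;
      }

      @Override
      public int hashCode() {
        int h = memoizedHashCode;
        if (h == 0) {
          h = (19 * 41) + payload.hashCode(); // same seed scheme as above
          memoizedHashCode = h;               // benign race: idempotent write
        }
        return h;
      }
    }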
Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code FirstKeyOnlyFilter} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilterOrBuilder { @@ -4889,18 +5905,21 @@ public final class FilterProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FirstKeyOnlyFilter_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FirstKeyOnlyFilter_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FirstKeyOnlyFilter_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -4911,25 +5930,25 @@ public final class FilterProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FirstKeyOnlyFilter_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.getDefaultInstance(); } - + public 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter build() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter result = buildPartial(); if (!result.isInitialized()) { @@ -4937,23 +5956,13 @@ public final class FilterProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter buildPartial() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter(this); onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter)other); @@ -4962,118 +5971,199 @@ public final class FilterProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter other) { if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - - + // @@protoc_insertion_point(builder_scope:FirstKeyOnlyFilter) } - + static { defaultInstance = new FirstKeyOnlyFilter(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:FirstKeyOnlyFilter) } - + public interface FirstKeyValueMatchingQualifiersFilterOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // repeated bytes qualifiers = 1; + /** + * repeated bytes qualifiers = 1; + */ java.util.List getQualifiersList(); + /** + * repeated bytes qualifiers = 1; + */ int getQualifiersCount(); + /** + * repeated bytes qualifiers = 1; + */ com.google.protobuf.ByteString getQualifiers(int index); } + /** + * Protobuf type {@code FirstKeyValueMatchingQualifiersFilter} + */ public static final 
class FirstKeyValueMatchingQualifiersFilter extends com.google.protobuf.GeneratedMessage implements FirstKeyValueMatchingQualifiersFilterOrBuilder { // Use FirstKeyValueMatchingQualifiersFilter.newBuilder() to construct. - private FirstKeyValueMatchingQualifiersFilter(Builder builder) { + private FirstKeyValueMatchingQualifiersFilter(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private FirstKeyValueMatchingQualifiersFilter(boolean noInit) {} - + private FirstKeyValueMatchingQualifiersFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final FirstKeyValueMatchingQualifiersFilter defaultInstance; public static FirstKeyValueMatchingQualifiersFilter getDefaultInstance() { return defaultInstance; } - + public FirstKeyValueMatchingQualifiersFilter getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private FirstKeyValueMatchingQualifiersFilter( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + qualifiers_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + qualifiers_.add(input.readBytes()); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + qualifiers_ = java.util.Collections.unmodifiableList(qualifiers_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FirstKeyValueMatchingQualifiersFilter_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FirstKeyValueMatchingQualifiersFilter_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FirstKeyValueMatchingQualifiersFilter_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public FirstKeyValueMatchingQualifiersFilter parsePartialFrom( + com.google.protobuf.CodedInputStream input, + 
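For the repeated bytes field, the constructor accumulates into a plain ArrayList guarded by mutable_bitField0_, and the finally block freezes it with Collections.unmodifiableList before the message can escape — so even a parse that throws leaves no mutable state behind. A round-trip sketch that exercises this path:

    import com.google.protobuf.ByteString;
    import com.google.protobuf.InvalidProtocolBufferException;
    import org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter;

    public class QualifiersRoundTrip {
      public static void main(String[] args) throws InvalidProtocolBufferException {
        FirstKeyValueMatchingQualifiersFilter original =
            FirstKeyValueMatchingQualifiersFilter.newBuilder()
                .addQualifiers(ByteString.copyFromUtf8("q1"))
                .addQualifiers(ByteString.copyFromUtf8("q2"))
                .build();

        // Each "case 10" iteration above appends one readBytes() result;
        // the finally block then freezes the list.
        FirstKeyValueMatchingQualifiersFilter reparsed =
            FirstKeyValueMatchingQualifiersFilter.PARSER
                .parseFrom(original.toByteArray());

        System.out.println(reparsed.getQualifiersCount()); // 2
      }
    }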
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new FirstKeyValueMatchingQualifiersFilter(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + // repeated bytes qualifiers = 1; public static final int QUALIFIERS_FIELD_NUMBER = 1; private java.util.List qualifiers_; + /** + * repeated bytes qualifiers = 1; + */ public java.util.List getQualifiersList() { return qualifiers_; } + /** + * repeated bytes qualifiers = 1; + */ public int getQualifiersCount() { return qualifiers_.size(); } + /** + * repeated bytes qualifiers = 1; + */ public com.google.protobuf.ByteString getQualifiers(int index) { return qualifiers_.get(index); } - + private void initFields() { - qualifiers_ = java.util.Collections.emptyList();; + qualifiers_ = java.util.Collections.emptyList(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -5082,12 +6172,12 @@ public final class FilterProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; { int dataSize = 0; @@ -5102,14 +6192,14 @@ public final class FilterProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -5119,7 +6209,7 @@ public final class FilterProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter) obj; - + boolean result = true; result = result && getQualifiersList() .equals(other.getQualifiersList()); @@ -5127,9 +6217,13 @@ public final class FilterProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (getQualifiersCount() > 0) { @@ -5137,89 +6231,79 @@ public final class FilterProtos { hash = (53 * hash) + getQualifiersList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - 
.buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code FirstKeyValueMatchingQualifiersFilter} + */ public static final class Builder extends 
com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilterOrBuilder { @@ -5227,18 +6311,21 @@ public final class FilterProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FirstKeyValueMatchingQualifiersFilter_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FirstKeyValueMatchingQualifiersFilter_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FirstKeyValueMatchingQualifiersFilter_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -5249,27 +6336,27 @@ public final class FilterProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); - qualifiers_ = java.util.Collections.emptyList();; + qualifiers_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FirstKeyValueMatchingQualifiersFilter_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter build() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter result = buildPartial(); if (!result.isInitialized()) { @@ -5277,17 +6364,7 @@ public final class FilterProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter buildPartial() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter(this); int from_bitField0_ = bitField0_; @@ -5299,7 +6376,7 @@ public final class 
FilterProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter)other); @@ -5308,7 +6385,7 @@ public final class FilterProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter other) { if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.getDefaultInstance()) return this; if (!other.qualifiers_.isEmpty()) { @@ -5324,63 +6401,60 @@ public final class FilterProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - ensureQualifiersIsMutable(); - qualifiers_.add(input.readBytes()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // repeated bytes qualifiers = 1; - private java.util.List qualifiers_ = java.util.Collections.emptyList();; + private java.util.List qualifiers_ = java.util.Collections.emptyList(); private void ensureQualifiersIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { qualifiers_ = new java.util.ArrayList(qualifiers_); bitField0_ |= 0x00000001; } } + /** + * repeated bytes qualifiers = 1; + */ public java.util.List getQualifiersList() { return java.util.Collections.unmodifiableList(qualifiers_); } + /** + * repeated bytes qualifiers = 1; + */ public int getQualifiersCount() { return qualifiers_.size(); } + /** + * repeated bytes qualifiers = 1; + */ public com.google.protobuf.ByteString getQualifiers(int index) { return qualifiers_.get(index); } + /** + * repeated bytes qualifiers = 1; + */ public Builder setQualifiers( int index, com.google.protobuf.ByteString value) { if (value == null) { @@ -5391,6 +6465,9 @@ public final class FilterProtos { onChanged(); return this; } + /** + * repeated bytes qualifiers = 1; + */ public Builder addQualifiers(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -5400,6 +6477,9 @@ public final class FilterProtos { onChanged(); return this; } + /** + * repeated bytes qualifiers = 1; + */ public Builder addAllQualifiers( java.lang.Iterable values) { 
ensureQualifiersIsMutable(); @@ -5407,86 +6487,192 @@ public final class FilterProtos { onChanged(); return this; } + /** + * repeated bytes qualifiers = 1; + */ public Builder clearQualifiers() { - qualifiers_ = java.util.Collections.emptyList();; + qualifiers_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:FirstKeyValueMatchingQualifiersFilter) } - + static { defaultInstance = new FirstKeyValueMatchingQualifiersFilter(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:FirstKeyValueMatchingQualifiersFilter) } - + public interface FuzzyRowFilterOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // repeated .BytesBytesPair fuzzyKeysData = 1; + /** + * repeated .BytesBytesPair fuzzyKeysData = 1; + */ java.util.List getFuzzyKeysDataList(); + /** + * repeated .BytesBytesPair fuzzyKeysData = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getFuzzyKeysData(int index); + /** + * repeated .BytesBytesPair fuzzyKeysData = 1; + */ int getFuzzyKeysDataCount(); + /** + * repeated .BytesBytesPair fuzzyKeysData = 1; + */ java.util.List getFuzzyKeysDataOrBuilderList(); + /** + * repeated .BytesBytesPair fuzzyKeysData = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getFuzzyKeysDataOrBuilder( int index); } + /** + * Protobuf type {@code FuzzyRowFilter} + */ public static final class FuzzyRowFilter extends com.google.protobuf.GeneratedMessage implements FuzzyRowFilterOrBuilder { // Use FuzzyRowFilter.newBuilder() to construct. - private FuzzyRowFilter(Builder builder) { + private FuzzyRowFilter(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private FuzzyRowFilter(boolean noInit) {} - + private FuzzyRowFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final FuzzyRowFilter defaultInstance; public static FuzzyRowFilter getDefaultInstance() { return defaultInstance; } - + public FuzzyRowFilter getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private FuzzyRowFilter( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + fuzzyKeysData_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + fuzzyKeysData_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.PARSER, extensionRegistry)); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new 
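Two things worth noting in the qualifiers builder hunks: the regeneration also cleans up the old stray double semicolon after Collections.emptyList(), and the builder list stays copy-on-write — qualifiers_ begins as the immutable empty list, and the first mutator calls ensureQualifiersIsMutable() to copy it into an ArrayList before writing, so a builder obtained from an existing message never mutates that message's frozen list. Sketch:

    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter;

    public class CopyOnWriteList {
      public static void main(String[] args) {
        FirstKeyValueMatchingQualifiersFilter base =
            FirstKeyValueMatchingQualifiersFilter.newBuilder()
                .addQualifiers(ByteString.copyFromUtf8("q1"))
                .build();

        // toBuilder() starts from base's frozen list; the first mutator
        // triggers ensureQualifiersIsMutable() and copies before writing.
        FirstKeyValueMatchingQualifiersFilter extended = base.toBuilder()
            .addQualifiers(ByteString.copyFromUtf8("q2"))
            .build();

        System.out.println(base.getQualifiersCount());     // 1, untouched
        System.out.println(extended.getQualifiersCount()); // 2
      }
    }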
com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + fuzzyKeysData_ = java.util.Collections.unmodifiableList(fuzzyKeysData_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FuzzyRowFilter_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FuzzyRowFilter_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FuzzyRowFilter_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public FuzzyRowFilter parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new FuzzyRowFilter(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + // repeated .BytesBytesPair fuzzyKeysData = 1; public static final int FUZZYKEYSDATA_FIELD_NUMBER = 1; private java.util.List fuzzyKeysData_; + /** + * repeated .BytesBytesPair fuzzyKeysData = 1; + */ public java.util.List getFuzzyKeysDataList() { return fuzzyKeysData_; } + /** + * repeated .BytesBytesPair fuzzyKeysData = 1; + */ public java.util.List getFuzzyKeysDataOrBuilderList() { return fuzzyKeysData_; } + /** + * repeated .BytesBytesPair fuzzyKeysData = 1; + */ public int getFuzzyKeysDataCount() { return fuzzyKeysData_.size(); } + /** + * repeated .BytesBytesPair fuzzyKeysData = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getFuzzyKeysData(int index) { return fuzzyKeysData_.get(index); } + /** + * repeated .BytesBytesPair fuzzyKeysData = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getFuzzyKeysDataOrBuilder( int index) { return fuzzyKeysData_.get(index); } - + private void initFields() { fuzzyKeysData_ = java.util.Collections.emptyList(); } @@ -5494,7 +6680,7 @@ public final class FilterProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + for (int i = 0; i < getFuzzyKeysDataCount(); i++) { if (!getFuzzyKeysData(i).isInitialized()) { memoizedIsInitialized = 0; @@ -5504,7 +6690,7 @@ public final class FilterProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -5513,12 +6699,12 @@ public final class FilterProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; for (int i = 0; i < fuzzyKeysData_.size(); i++) { size += com.google.protobuf.CodedOutputStream @@ -5528,14 +6714,14 @@ public final class FilterProtos { 
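FuzzyRowFilter's repeated .BytesBytesPair field shows why the generated isInitialized() recurses into every list element: BytesBytesPair declares both of its bytes fields as required in hbase.proto, so an element missing either one would make the whole filter uninitialized and build() would throw. A sketch with placeholder key/mask bytes (the first/second field names are the hbase.proto pair fields; the mask semantics belong to FuzzyRowFilter itself, not the codec):

    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter;
    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair;

    public class FuzzyRowSketch {
      public static void main(String[] args) {
        BytesBytesPair pair = BytesBytesPair.newBuilder()
            .setFirst(ByteString.copyFrom(new byte[] {0, 0, 9}))  // fuzzy row key
            .setSecond(ByteString.copyFrom(new byte[] {1, 1, 0})) // per-byte mask
            .build();

        FuzzyRowFilter filter = FuzzyRowFilter.newBuilder()
            .addFuzzyKeysData(pair)
            .build(); // would throw if pair lacked first or second

        System.out.println(filter.getFuzzyKeysDataCount()); // 1
      }
    }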
memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -5545,7 +6731,7 @@ public final class FilterProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter) obj; - + boolean result = true; result = result && getFuzzyKeysDataList() .equals(other.getFuzzyKeysDataList()); @@ -5553,9 +6739,13 @@ public final class FilterProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (getFuzzyKeysDataCount() > 0) { @@ -5563,89 +6753,79 @@ public final class FilterProtos { hash = (53 * hash) + getFuzzyKeysDataList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code FuzzyRowFilter} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilterOrBuilder { @@ -5653,18 +6833,21 @@ public final class FilterProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FuzzyRowFilter_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FuzzyRowFilter_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FuzzyRowFilter_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -5676,7 +6859,7 @@ public final class FilterProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (fuzzyKeysDataBuilder_ == null) { @@ -5687,20 +6870,20 @@ public final class FilterProtos { } return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FuzzyRowFilter_descriptor; } - + public 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter build() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter result = buildPartial(); if (!result.isInitialized()) { @@ -5708,17 +6891,7 @@ public final class FilterProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter buildPartial() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter(this); int from_bitField0_ = bitField0_; @@ -5734,7 +6907,7 @@ public final class FilterProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter)other); @@ -5743,7 +6916,7 @@ public final class FilterProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter other) { if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter.getDefaultInstance()) return this; if (fuzzyKeysDataBuilder_ == null) { @@ -5775,7 +6948,7 @@ public final class FilterProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { for (int i = 0; i < getFuzzyKeysDataCount(); i++) { if (!getFuzzyKeysData(i).isInitialized()) { @@ -5785,42 +6958,26 @@ public final class FilterProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addFuzzyKeysData(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage 
!= null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // repeated .BytesBytesPair fuzzyKeysData = 1; private java.util.List fuzzyKeysData_ = java.util.Collections.emptyList(); @@ -5830,10 +6987,13 @@ public final class FilterProtos { bitField0_ |= 0x00000001; } } - + private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> fuzzyKeysDataBuilder_; - + + /** + * repeated .BytesBytesPair fuzzyKeysData = 1; + */ public java.util.List getFuzzyKeysDataList() { if (fuzzyKeysDataBuilder_ == null) { return java.util.Collections.unmodifiableList(fuzzyKeysData_); @@ -5841,6 +7001,9 @@ public final class FilterProtos { return fuzzyKeysDataBuilder_.getMessageList(); } } + /** + * repeated .BytesBytesPair fuzzyKeysData = 1; + */ public int getFuzzyKeysDataCount() { if (fuzzyKeysDataBuilder_ == null) { return fuzzyKeysData_.size(); @@ -5848,6 +7011,9 @@ public final class FilterProtos { return fuzzyKeysDataBuilder_.getCount(); } } + /** + * repeated .BytesBytesPair fuzzyKeysData = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getFuzzyKeysData(int index) { if (fuzzyKeysDataBuilder_ == null) { return fuzzyKeysData_.get(index); @@ -5855,6 +7021,9 @@ public final class FilterProtos { return fuzzyKeysDataBuilder_.getMessage(index); } } + /** + * repeated .BytesBytesPair fuzzyKeysData = 1; + */ public Builder setFuzzyKeysData( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) { if (fuzzyKeysDataBuilder_ == null) { @@ -5869,6 +7038,9 @@ public final class FilterProtos { } return this; } + /** + * repeated .BytesBytesPair fuzzyKeysData = 1; + */ public Builder setFuzzyKeysData( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) { if (fuzzyKeysDataBuilder_ == null) { @@ -5880,6 +7052,9 @@ public final class FilterProtos { } return this; } + /** + * repeated .BytesBytesPair fuzzyKeysData = 1; + */ public Builder addFuzzyKeysData(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) { if (fuzzyKeysDataBuilder_ == null) { if (value == null) { @@ -5893,6 +7068,9 @@ public final class FilterProtos { } return this; } + /** + * repeated .BytesBytesPair fuzzyKeysData = 1; + */ public Builder addFuzzyKeysData( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) { if (fuzzyKeysDataBuilder_ == null) { @@ -5907,6 +7085,9 @@ public final class FilterProtos { } return this; } + /** + * repeated .BytesBytesPair fuzzyKeysData = 1; + */ public Builder addFuzzyKeysData( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) { if (fuzzyKeysDataBuilder_ == null) { @@ -5918,6 +7099,9 @@ public final class FilterProtos { } return this; } + /** + * repeated .BytesBytesPair fuzzyKeysData = 1; + */ public Builder addFuzzyKeysData( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) { if (fuzzyKeysDataBuilder_ == null) { @@ -5929,6 +7113,9 @@ public final class FilterProtos { } return this; } + /** + * repeated .BytesBytesPair fuzzyKeysData = 1; + */ public Builder addAllFuzzyKeysData( java.lang.Iterable values) { if (fuzzyKeysDataBuilder_ == null) { @@ -5940,6 +7127,9 @@ public final class FilterProtos { 
} return this; } + /** + * repeated .BytesBytesPair fuzzyKeysData = 1; + */ public Builder clearFuzzyKeysData() { if (fuzzyKeysDataBuilder_ == null) { fuzzyKeysData_ = java.util.Collections.emptyList(); @@ -5950,6 +7140,9 @@ public final class FilterProtos { } return this; } + /** + * repeated .BytesBytesPair fuzzyKeysData = 1; + */ public Builder removeFuzzyKeysData(int index) { if (fuzzyKeysDataBuilder_ == null) { ensureFuzzyKeysDataIsMutable(); @@ -5960,10 +7153,16 @@ public final class FilterProtos { } return this; } + /** + * repeated .BytesBytesPair fuzzyKeysData = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder getFuzzyKeysDataBuilder( int index) { return getFuzzyKeysDataFieldBuilder().getBuilder(index); } + /** + * repeated .BytesBytesPair fuzzyKeysData = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getFuzzyKeysDataOrBuilder( int index) { if (fuzzyKeysDataBuilder_ == null) { @@ -5971,6 +7170,9 @@ public final class FilterProtos { return fuzzyKeysDataBuilder_.getMessageOrBuilder(index); } } + /** + * repeated .BytesBytesPair fuzzyKeysData = 1; + */ public java.util.List getFuzzyKeysDataOrBuilderList() { if (fuzzyKeysDataBuilder_ != null) { @@ -5979,15 +7181,24 @@ public final class FilterProtos { return java.util.Collections.unmodifiableList(fuzzyKeysData_); } } + /** + * repeated .BytesBytesPair fuzzyKeysData = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder addFuzzyKeysDataBuilder() { return getFuzzyKeysDataFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance()); } + /** + * repeated .BytesBytesPair fuzzyKeysData = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder addFuzzyKeysDataBuilder( int index) { return getFuzzyKeysDataFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance()); } + /** + * repeated .BytesBytesPair fuzzyKeysData = 1; + */ public java.util.List getFuzzyKeysDataBuilderList() { return getFuzzyKeysDataFieldBuilder().getBuilderList(); @@ -6006,64 +7217,143 @@ public final class FilterProtos { } return fuzzyKeysDataBuilder_; } - + // @@protoc_insertion_point(builder_scope:FuzzyRowFilter) } - + static { defaultInstance = new FuzzyRowFilter(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:FuzzyRowFilter) } - + public interface InclusiveStopFilterOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // optional bytes stopRowKey = 1; + /** + * optional bytes stopRowKey = 1; + */ boolean hasStopRowKey(); + /** + * optional bytes stopRowKey = 1; + */ com.google.protobuf.ByteString getStopRowKey(); } + /** + * Protobuf type {@code InclusiveStopFilter} + */ public static final class InclusiveStopFilter extends com.google.protobuf.GeneratedMessage implements InclusiveStopFilterOrBuilder { // Use InclusiveStopFilter.newBuilder() to construct. 
-    private InclusiveStopFilter(Builder builder) {
+    private InclusiveStopFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
       super(builder);
+      this.unknownFields = builder.getUnknownFields();
     }
-    private InclusiveStopFilter(boolean noInit) {}
-
+    private InclusiveStopFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
     private static final InclusiveStopFilter defaultInstance;
     public static InclusiveStopFilter getDefaultInstance() {
       return defaultInstance;
     }
-
+
     public InclusiveStopFilter getDefaultInstanceForType() {
       return defaultInstance;
     }
-
-    public static final com.google.protobuf.Descriptors.Descriptor
-        getDescriptor() {
-      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_InclusiveStopFilter_descriptor;
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
     }
-
+    private InclusiveStopFilter(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 10: {
+              bitField0_ |= 0x00000001;
+              stopRowKey_ = input.readBytes();
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_InclusiveStopFilter_descriptor;
+    }
+
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_InclusiveStopFilter_fieldAccessorTable;
+      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_InclusiveStopFilter_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter.Builder.class);
     }
-
+
+    public static com.google.protobuf.Parser<InclusiveStopFilter> PARSER =
+        new com.google.protobuf.AbstractParser<InclusiveStopFilter>() {
+      public InclusiveStopFilter parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new InclusiveStopFilter(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<InclusiveStopFilter> getParserForType() {
+      return PARSER;
+    }
+
     private int bitField0_;
     // optional bytes stopRowKey = 1;
     public static final int STOPROWKEY_FIELD_NUMBER = 1;
     private com.google.protobuf.ByteString stopRowKey_;
+    /**
+     * <code>optional bytes stopRowKey = 1;</code>
+     */
    public boolean
hasStopRowKey() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional bytes stopRowKey = 1; + */ public com.google.protobuf.ByteString getStopRowKey() { return stopRowKey_; } - + private void initFields() { stopRowKey_ = com.google.protobuf.ByteString.EMPTY; } @@ -6071,11 +7361,11 @@ public final class FilterProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -6084,12 +7374,12 @@ public final class FilterProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -6099,14 +7389,14 @@ public final class FilterProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -6116,7 +7406,7 @@ public final class FilterProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter) obj; - + boolean result = true; result = result && (hasStopRowKey() == other.hasStopRowKey()); if (hasStopRowKey()) { @@ -6127,9 +7417,13 @@ public final class FilterProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasStopRowKey()) { @@ -6137,89 +7431,79 @@ public final class FilterProtos { hash = (53 * hash) + getStopRowKey().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); 
+ return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code InclusiveStopFilter} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilterOrBuilder { @@ -6227,18 +7511,21 @@ public final class FilterProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_InclusiveStopFilter_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_InclusiveStopFilter_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_InclusiveStopFilter_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter.class, 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -6249,27 +7536,27 @@ public final class FilterProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); stopRowKey_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_InclusiveStopFilter_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter build() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter result = buildPartial(); if (!result.isInitialized()) { @@ -6277,17 +7564,7 @@ public final class FilterProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter buildPartial() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter(this); int from_bitField0_ = bitField0_; @@ -6300,7 +7577,7 @@ public final class FilterProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter)other); @@ -6309,7 +7586,7 @@ public final class FilterProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter other) { if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter.getDefaultInstance()) return this; if (other.hasStopRowKey()) { @@ -6318,53 +7595,47 @@ public final class FilterProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = 
input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - stopRowKey_ = input.readBytes(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // optional bytes stopRowKey = 1; private com.google.protobuf.ByteString stopRowKey_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes stopRowKey = 1; + */ public boolean hasStopRowKey() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional bytes stopRowKey = 1; + */ public com.google.protobuf.ByteString getStopRowKey() { return stopRowKey_; } + /** + * optional bytes stopRowKey = 1; + */ public Builder setStopRowKey(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -6374,70 +7645,152 @@ public final class FilterProtos { onChanged(); return this; } + /** + * optional bytes stopRowKey = 1; + */ public Builder clearStopRowKey() { bitField0_ = (bitField0_ & ~0x00000001); stopRowKey_ = getDefaultInstance().getStopRowKey(); onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:InclusiveStopFilter) } - + static { defaultInstance = new InclusiveStopFilter(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:InclusiveStopFilter) } - + public interface KeyOnlyFilterOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required bool lenAsVal = 1; + /** + * required bool lenAsVal = 1; + */ boolean hasLenAsVal(); + /** + * required bool lenAsVal = 1; + */ boolean getLenAsVal(); } + /** + * Protobuf type {@code KeyOnlyFilter} + */ public static final class KeyOnlyFilter extends com.google.protobuf.GeneratedMessage implements KeyOnlyFilterOrBuilder { // Use KeyOnlyFilter.newBuilder() to construct. 
- private KeyOnlyFilter(Builder builder) { + private KeyOnlyFilter(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private KeyOnlyFilter(boolean noInit) {} - + private KeyOnlyFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final KeyOnlyFilter defaultInstance; public static KeyOnlyFilter getDefaultInstance() { return defaultInstance; } - + public KeyOnlyFilter getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private KeyOnlyFilter( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + lenAsVal_ = input.readBool(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_KeyOnlyFilter_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_KeyOnlyFilter_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_KeyOnlyFilter_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public KeyOnlyFilter parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new KeyOnlyFilter(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required bool lenAsVal = 1; public static final int LENASVAL_FIELD_NUMBER = 1; private boolean lenAsVal_; + /** + * required bool lenAsVal = 1; + */ public boolean hasLenAsVal() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bool lenAsVal = 1; + */ public boolean getLenAsVal() { return lenAsVal_; } - + private void initFields() { lenAsVal_ = false; } @@ -6445,7 +7798,7 @@ public final class FilterProtos { public final boolean isInitialized() { byte isInitialized = 
memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasLenAsVal()) { memoizedIsInitialized = 0; return false; @@ -6453,7 +7806,7 @@ public final class FilterProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -6462,12 +7815,12 @@ public final class FilterProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -6477,14 +7830,14 @@ public final class FilterProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -6494,7 +7847,7 @@ public final class FilterProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter) obj; - + boolean result = true; result = result && (hasLenAsVal() == other.hasLenAsVal()); if (hasLenAsVal()) { @@ -6505,9 +7858,13 @@ public final class FilterProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasLenAsVal()) { @@ -6515,89 +7872,79 @@ public final class FilterProtos { hash = (53 * hash) + hashBoolean(getLenAsVal()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code KeyOnlyFilter} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilterOrBuilder { @@ -6605,18 +7952,21 @@ public final class FilterProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_KeyOnlyFilter_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_KeyOnlyFilter_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_KeyOnlyFilter_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ 
-6627,27 +7977,27 @@ public final class FilterProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); lenAsVal_ = false; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_KeyOnlyFilter_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter build() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter result = buildPartial(); if (!result.isInitialized()) { @@ -6655,17 +8005,7 @@ public final class FilterProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter buildPartial() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter(this); int from_bitField0_ = bitField0_; @@ -6678,7 +8018,7 @@ public final class FilterProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter)other); @@ -6687,7 +8027,7 @@ public final class FilterProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter other) { if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter.getDefaultInstance()) return this; if (other.hasLenAsVal()) { @@ -6696,7 +8036,7 @@ public final class FilterProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasLenAsVal()) { @@ -6704,135 +8044,223 @@ public final class FilterProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - lenAsVal_ = input.readBool(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parsedMessage = null; + try { + 
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required bool lenAsVal = 1; private boolean lenAsVal_ ; + /** + * required bool lenAsVal = 1; + */ public boolean hasLenAsVal() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bool lenAsVal = 1; + */ public boolean getLenAsVal() { return lenAsVal_; } + /** + * required bool lenAsVal = 1; + */ public Builder setLenAsVal(boolean value) { bitField0_ |= 0x00000001; lenAsVal_ = value; onChanged(); return this; } + /** + * required bool lenAsVal = 1; + */ public Builder clearLenAsVal() { bitField0_ = (bitField0_ & ~0x00000001); lenAsVal_ = false; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:KeyOnlyFilter) } - + static { defaultInstance = new KeyOnlyFilter(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:KeyOnlyFilter) } - + public interface MultipleColumnPrefixFilterOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // repeated bytes sortedPrefixes = 1; + /** + * repeated bytes sortedPrefixes = 1; + */ java.util.List getSortedPrefixesList(); + /** + * repeated bytes sortedPrefixes = 1; + */ int getSortedPrefixesCount(); + /** + * repeated bytes sortedPrefixes = 1; + */ com.google.protobuf.ByteString getSortedPrefixes(int index); } + /** + * Protobuf type {@code MultipleColumnPrefixFilter} + */ public static final class MultipleColumnPrefixFilter extends com.google.protobuf.GeneratedMessage implements MultipleColumnPrefixFilterOrBuilder { // Use MultipleColumnPrefixFilter.newBuilder() to construct. 
-    private MultipleColumnPrefixFilter(Builder builder) {
+    private MultipleColumnPrefixFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
       super(builder);
+      this.unknownFields = builder.getUnknownFields();
     }
-    private MultipleColumnPrefixFilter(boolean noInit) {}
-
+    private MultipleColumnPrefixFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
     private static final MultipleColumnPrefixFilter defaultInstance;
     public static MultipleColumnPrefixFilter getDefaultInstance() {
       return defaultInstance;
     }
-
+
     public MultipleColumnPrefixFilter getDefaultInstanceForType() {
       return defaultInstance;
     }
-
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private MultipleColumnPrefixFilter(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 10: {
+              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+                sortedPrefixes_ = new java.util.ArrayList<com.google.protobuf.ByteString>();
+                mutable_bitField0_ |= 0x00000001;
+              }
+              sortedPrefixes_.add(input.readBytes());
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+          sortedPrefixes_ = java.util.Collections.unmodifiableList(sortedPrefixes_);
+        }
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_MultipleColumnPrefixFilter_descriptor;
     }
-
+
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_MultipleColumnPrefixFilter_fieldAccessorTable;
+      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_MultipleColumnPrefixFilter_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<MultipleColumnPrefixFilter> PARSER =
+        new com.google.protobuf.AbstractParser<MultipleColumnPrefixFilter>() {
+      public MultipleColumnPrefixFilter parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new MultipleColumnPrefixFilter(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<MultipleColumnPrefixFilter> getParserForType() {
+      return PARSER;
     }
-
+
     // repeated bytes sortedPrefixes = 1;
     public static final int
SORTEDPREFIXES_FIELD_NUMBER = 1; private java.util.List sortedPrefixes_; + /** + * repeated bytes sortedPrefixes = 1; + */ public java.util.List getSortedPrefixesList() { return sortedPrefixes_; } + /** + * repeated bytes sortedPrefixes = 1; + */ public int getSortedPrefixesCount() { return sortedPrefixes_.size(); } + /** + * repeated bytes sortedPrefixes = 1; + */ public com.google.protobuf.ByteString getSortedPrefixes(int index) { return sortedPrefixes_.get(index); } - + private void initFields() { - sortedPrefixes_ = java.util.Collections.emptyList();; + sortedPrefixes_ = java.util.Collections.emptyList(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -6841,12 +8269,12 @@ public final class FilterProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; { int dataSize = 0; @@ -6861,14 +8289,14 @@ public final class FilterProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -6878,7 +8306,7 @@ public final class FilterProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter) obj; - + boolean result = true; result = result && getSortedPrefixesList() .equals(other.getSortedPrefixesList()); @@ -6886,9 +8314,13 @@ public final class FilterProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (getSortedPrefixesCount() > 0) { @@ -6896,89 +8328,79 @@ public final class FilterProtos { hash = (53 * hash) + getSortedPrefixesList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public 
static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code MultipleColumnPrefixFilter} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilterOrBuilder { @@ -6986,18 +8408,21 @@ public final class FilterProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_MultipleColumnPrefixFilter_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_MultipleColumnPrefixFilter_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_MultipleColumnPrefixFilter_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -7008,27 +8433,27 @@ public final class FilterProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); - sortedPrefixes_ = java.util.Collections.emptyList();; + sortedPrefixes_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_MultipleColumnPrefixFilter_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter build() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter result = buildPartial(); if (!result.isInitialized()) { @@ -7036,17 +8461,7 @@ public final class FilterProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter buildPartial() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter(this); int from_bitField0_ = bitField0_; @@ -7058,7 +8473,7 @@ public final class FilterProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter)other); @@ -7067,7 +8482,7 @@ public final class FilterProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter other) { if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter.getDefaultInstance()) return this; if 
(!other.sortedPrefixes_.isEmpty()) { @@ -7083,63 +8498,60 @@ public final class FilterProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - ensureSortedPrefixesIsMutable(); - sortedPrefixes_.add(input.readBytes()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // repeated bytes sortedPrefixes = 1; - private java.util.List sortedPrefixes_ = java.util.Collections.emptyList();; + private java.util.List sortedPrefixes_ = java.util.Collections.emptyList(); private void ensureSortedPrefixesIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { sortedPrefixes_ = new java.util.ArrayList(sortedPrefixes_); bitField0_ |= 0x00000001; } } + /** + * repeated bytes sortedPrefixes = 1; + */ public java.util.List getSortedPrefixesList() { return java.util.Collections.unmodifiableList(sortedPrefixes_); } + /** + * repeated bytes sortedPrefixes = 1; + */ public int getSortedPrefixesCount() { return sortedPrefixes_.size(); } + /** + * repeated bytes sortedPrefixes = 1; + */ public com.google.protobuf.ByteString getSortedPrefixes(int index) { return sortedPrefixes_.get(index); } + /** + * repeated bytes sortedPrefixes = 1; + */ public Builder setSortedPrefixes( int index, com.google.protobuf.ByteString value) { if (value == null) { @@ -7150,6 +8562,9 @@ public final class FilterProtos { onChanged(); return this; } + /** + * repeated bytes sortedPrefixes = 1; + */ public Builder addSortedPrefixes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -7159,6 +8574,9 @@ public final class FilterProtos { onChanged(); return this; } + /** + * repeated bytes sortedPrefixes = 1; + */ public Builder addAllSortedPrefixes( java.lang.Iterable values) { ensureSortedPrefixesIsMutable(); @@ -7166,70 +8584,152 @@ public final class FilterProtos { onChanged(); return this; } + /** + * repeated bytes sortedPrefixes = 1; + */ public Builder clearSortedPrefixes() { - sortedPrefixes_ = java.util.Collections.emptyList();; + sortedPrefixes_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:MultipleColumnPrefixFilter) } - + static { defaultInstance = new MultipleColumnPrefixFilter(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:MultipleColumnPrefixFilter) 
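    // A minimal usage sketch, assuming a byte[] named `serialized` holding an
    // encoded MultipleColumnPrefixFilter; this helper is hypothetical and not
    // emitted by protoc. It shows what the PARSER migration above buys callers:
    // parsing no longer allocates a Builder or round-trips through buildParsed().
    private static MultipleColumnPrefixFilter exampleParse(byte[] serialized)
        throws com.google.protobuf.InvalidProtocolBufferException {
      // pre-2.5:  newBuilder().mergeFrom(serialized).buildParsed()
      // 2.5:      the static PARSER decodes straight into an immutable message
      return PARSER.parseFrom(serialized);
    }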
  }
-
+
  public interface PageFilterOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
-
+
    // required int64 pageSize = 1;
+    /**
+     * <code>required int64 pageSize = 1;</code>
+     */
    boolean hasPageSize();
+    /**
+     * <code>required int64 pageSize = 1;</code>
+     */
    long getPageSize();
  }
+  /**
+   * Protobuf type {@code PageFilter}
+   */
  public static final class PageFilter extends
      com.google.protobuf.GeneratedMessage
      implements PageFilterOrBuilder {
    // Use PageFilter.newBuilder() to construct.
-    private PageFilter(Builder builder) {
+    private PageFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
+      this.unknownFields = builder.getUnknownFields();
    }
-    private PageFilter(boolean noInit) {}
-
+    private PageFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
    private static final PageFilter defaultInstance;
    public static PageFilter getDefaultInstance() {
      return defaultInstance;
    }
-
+
    public PageFilter getDefaultInstanceForType() {
      return defaultInstance;
    }
-
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private PageFilter(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 8: {
+              bitField0_ |= 0x00000001;
+              pageSize_ = input.readInt64();
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_PageFilter_descriptor;
    }
-
+
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_PageFilter_fieldAccessorTable;
+      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_PageFilter_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter.Builder.class);
    }
-
+
+    public static com.google.protobuf.Parser<PageFilter> PARSER =
+        new com.google.protobuf.AbstractParser<PageFilter>() {
+      public PageFilter parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new PageFilter(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<PageFilter> getParserForType() {
+      return PARSER;
+    }
+
    private int bitField0_;
    // required int64 pageSize = 1;
    public static final int PAGESIZE_FIELD_NUMBER = 1;
private long pageSize_; + /** + * required int64 pageSize = 1; + */ public boolean hasPageSize() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required int64 pageSize = 1; + */ public long getPageSize() { return pageSize_; } - + private void initFields() { pageSize_ = 0L; } @@ -7237,7 +8737,7 @@ public final class FilterProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasPageSize()) { memoizedIsInitialized = 0; return false; @@ -7245,7 +8745,7 @@ public final class FilterProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -7254,12 +8754,12 @@ public final class FilterProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -7269,14 +8769,14 @@ public final class FilterProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -7286,7 +8786,7 @@ public final class FilterProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter) obj; - + boolean result = true; result = result && (hasPageSize() == other.hasPageSize()); if (hasPageSize()) { @@ -7297,9 +8797,13 @@ public final class FilterProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasPageSize()) { @@ -7307,89 +8811,79 @@ public final class FilterProtos { hash = (53 * hash) + hashLong(getPageSize()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return 
newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code PageFilter} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilterOrBuilder { @@ -7397,18 +8891,21 @@ public final class FilterProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_PageFilter_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_PageFilter_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_PageFilter_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter.Builder.class); } - + // Construct 
using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -7419,27 +8916,27 @@ public final class FilterProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); pageSize_ = 0L; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_PageFilter_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter build() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter result = buildPartial(); if (!result.isInitialized()) { @@ -7447,17 +8944,7 @@ public final class FilterProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter buildPartial() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter(this); int from_bitField0_ = bitField0_; @@ -7470,7 +8957,7 @@ public final class FilterProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter)other); @@ -7479,7 +8966,7 @@ public final class FilterProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter other) { if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter.getDefaultInstance()) return this; if (other.hasPageSize()) { @@ -7488,7 +8975,7 @@ public final class FilterProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasPageSize()) { @@ -7496,119 +8983,195 @@ public final class FilterProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - 
this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - pageSize_ = input.readInt64(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required int64 pageSize = 1; private long pageSize_ ; + /** + * required int64 pageSize = 1; + */ public boolean hasPageSize() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required int64 pageSize = 1; + */ public long getPageSize() { return pageSize_; } + /** + * required int64 pageSize = 1; + */ public Builder setPageSize(long value) { bitField0_ |= 0x00000001; pageSize_ = value; onChanged(); return this; } + /** + * required int64 pageSize = 1; + */ public Builder clearPageSize() { bitField0_ = (bitField0_ & ~0x00000001); pageSize_ = 0L; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:PageFilter) } - + static { defaultInstance = new PageFilter(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:PageFilter) } - + public interface PrefixFilterOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // optional bytes prefix = 1; + /** + * optional bytes prefix = 1; + */ boolean hasPrefix(); + /** + * optional bytes prefix = 1; + */ com.google.protobuf.ByteString getPrefix(); } + /** + * Protobuf type {@code PrefixFilter} + */ public static final class PrefixFilter extends com.google.protobuf.GeneratedMessage implements PrefixFilterOrBuilder { // Use PrefixFilter.newBuilder() to construct. 
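    // A minimal round-trip sketch (hypothetical helper, not generated code) for
    // the rewritten PageFilter Builder.mergeFrom(CodedInputStream) above, which
    // now delegates to PARSER.parsePartialFrom() and re-merges any partially
    // parsed message before rethrowing.
    private static boolean examplePageFilterRoundTrip()
        throws com.google.protobuf.InvalidProtocolBufferException {
      PageFilter in = PageFilter.newBuilder().setPageSize(100L).build();
      PageFilter out = PageFilter.PARSER.parseFrom(in.toByteArray());
      return out.getPageSize() == 100L;  // expected to be true
    }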
- private PrefixFilter(Builder builder) { + private PrefixFilter(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private PrefixFilter(boolean noInit) {} - + private PrefixFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final PrefixFilter defaultInstance; public static PrefixFilter getDefaultInstance() { return defaultInstance; } - + public PrefixFilter getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private PrefixFilter( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + prefix_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_PrefixFilter_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_PrefixFilter_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_PrefixFilter_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public PrefixFilter parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new PrefixFilter(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // optional bytes prefix = 1; public static final int PREFIX_FIELD_NUMBER = 1; private com.google.protobuf.ByteString prefix_; + /** + * optional bytes prefix = 1; + */ public boolean hasPrefix() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional bytes prefix = 1; + */ public com.google.protobuf.ByteString getPrefix() { return prefix_; } - + private void initFields() { prefix_ = com.google.protobuf.ByteString.EMPTY; } @@ -7616,11 +9179,11 @@ public final class FilterProtos { public final 
boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -7629,12 +9192,12 @@ public final class FilterProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -7644,14 +9207,14 @@ public final class FilterProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -7661,7 +9224,7 @@ public final class FilterProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter) obj; - + boolean result = true; result = result && (hasPrefix() == other.hasPrefix()); if (hasPrefix()) { @@ -7672,9 +9235,13 @@ public final class FilterProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasPrefix()) { @@ -7682,89 +9249,79 @@ public final class FilterProtos { hash = (53 * hash) + getPrefix().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseFrom( java.io.InputStream 
input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code PrefixFilter} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilterOrBuilder { @@ -7772,18 +9329,21 @@ public final class FilterProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_PrefixFilter_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_PrefixFilter_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_PrefixFilter_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -7794,27 +9354,27 @@ public final class FilterProtos { private static Builder create() { return new Builder(); } - + 
public Builder clear() { super.clear(); prefix_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_PrefixFilter_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter build() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter result = buildPartial(); if (!result.isInitialized()) { @@ -7822,17 +9382,7 @@ public final class FilterProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter buildPartial() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter(this); int from_bitField0_ = bitField0_; @@ -7845,7 +9395,7 @@ public final class FilterProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter)other); @@ -7854,7 +9404,7 @@ public final class FilterProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter other) { if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter.getDefaultInstance()) return this; if (other.hasPrefix()) { @@ -7863,53 +9413,47 @@ public final class FilterProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - prefix_ = input.readBytes(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = 
(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // optional bytes prefix = 1; private com.google.protobuf.ByteString prefix_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes prefix = 1; + */ public boolean hasPrefix() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional bytes prefix = 1; + */ public com.google.protobuf.ByteString getPrefix() { return prefix_; } + /** + * optional bytes prefix = 1; + */ public Builder setPrefix(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -7919,74 +9463,170 @@ public final class FilterProtos { onChanged(); return this; } + /** + * optional bytes prefix = 1; + */ public Builder clearPrefix() { bitField0_ = (bitField0_ & ~0x00000001); prefix_ = getDefaultInstance().getPrefix(); onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:PrefixFilter) } - + static { defaultInstance = new PrefixFilter(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:PrefixFilter) } - + public interface QualifierFilterOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .CompareFilter compareFilter = 1; + /** + * required .CompareFilter compareFilter = 1; + */ boolean hasCompareFilter(); + /** + * required .CompareFilter compareFilter = 1; + */ org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter(); + /** + * required .CompareFilter compareFilter = 1; + */ org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder(); } + /** + * Protobuf type {@code QualifierFilter} + */ public static final class QualifierFilter extends com.google.protobuf.GeneratedMessage implements QualifierFilterOrBuilder { // Use QualifierFilter.newBuilder() to construct. 
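    // A minimal sketch (hypothetical helper, not generated code) of the nested
    // message handling in the parsing constructor below: the CompareFilter field
    // is read via CompareFilter.PARSER, and a repeated occurrence on the wire is
    // merged through toBuilder()/buildPartial(). Assumes `cf` has its required
    // fields set, since build() enforces initialization.
    private static QualifierFilter exampleNestedParse(CompareFilter cf)
        throws com.google.protobuf.InvalidProtocolBufferException {
      QualifierFilter qf = QualifierFilter.newBuilder().setCompareFilter(cf).build();
      return QualifierFilter.PARSER.parseFrom(qf.toByteString());
    }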
- private QualifierFilter(Builder builder) { + private QualifierFilter(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private QualifierFilter(boolean noInit) {} - + private QualifierFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final QualifierFilter defaultInstance; public static QualifierFilter getDefaultInstance() { return defaultInstance; } - + public QualifierFilter getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private QualifierFilter( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = compareFilter_.toBuilder(); + } + compareFilter_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(compareFilter_); + compareFilter_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_QualifierFilter_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_QualifierFilter_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_QualifierFilter_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public QualifierFilter parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new QualifierFilter(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required .CompareFilter compareFilter = 1; public 
static final int COMPAREFILTER_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_; + /** + * required .CompareFilter compareFilter = 1; + */ public boolean hasCompareFilter() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .CompareFilter compareFilter = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() { return compareFilter_; } + /** + * required .CompareFilter compareFilter = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() { return compareFilter_; } - + private void initFields() { compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); } @@ -7994,7 +9634,7 @@ public final class FilterProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasCompareFilter()) { memoizedIsInitialized = 0; return false; @@ -8006,7 +9646,7 @@ public final class FilterProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -8015,12 +9655,12 @@ public final class FilterProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -8030,14 +9670,14 @@ public final class FilterProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -8047,7 +9687,7 @@ public final class FilterProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter) obj; - + boolean result = true; result = result && (hasCompareFilter() == other.hasCompareFilter()); if (hasCompareFilter()) { @@ -8058,9 +9698,13 @@ public final class FilterProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasCompareFilter()) { @@ -8068,89 +9712,79 @@ public final class FilterProtos { hash = (53 * hash) + getCompareFilter().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, 
extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code QualifierFilter} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilterOrBuilder { @@ -8158,18 +9792,21 @@ public final class FilterProtos { getDescriptor() { return 
               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_QualifierFilter_descriptor;
       }
-
+
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_QualifierFilter_fieldAccessorTable;
+        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_QualifierFilter_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter.Builder.class);
       }
-
+
       // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter.newBuilder()
       private Builder() {
         maybeForceBuilderInitialization();
       }
-
-      private Builder(BuilderParent parent) {
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
         super(parent);
         maybeForceBuilderInitialization();
       }
@@ -8181,7 +9818,7 @@ public final class FilterProtos {
       private static Builder create() {
         return new Builder();
       }
-
+
       public Builder clear() {
         super.clear();
         if (compareFilterBuilder_ == null) {
@@ -8192,20 +9829,20 @@ public final class FilterProtos {
         bitField0_ = (bitField0_ & ~0x00000001);
         return this;
       }
-
+
       public Builder clone() {
         return create().mergeFrom(buildPartial());
       }
-
+
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter.getDescriptor();
+        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_QualifierFilter_descriptor;
       }
-
+
       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter getDefaultInstanceForType() {
         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter.getDefaultInstance();
       }
-
+
       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter build() {
         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter result = buildPartial();
         if (!result.isInitialized()) {
@@ -8213,17 +9850,7 @@ public final class FilterProtos {
         }
         return result;
       }
-
-      private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter buildParsed()
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(
-            result).asInvalidProtocolBufferException();
-        }
-        return result;
-      }
-
+
       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter buildPartial() {
         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter(this);
         int from_bitField0_ = bitField0_;
@@ -8240,7 +9867,7 @@ public final class FilterProtos {
         onBuilt();
         return result;
       }
-
+
       public Builder mergeFrom(com.google.protobuf.Message other) {
         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter) {
           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter)other);
@@ -8249,7 +9876,7 @@ public final class FilterProtos {
           return this;
         }
       }
-
+
       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter other) {
         if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter.getDefaultInstance()) return this;
         if (other.hasCompareFilter()) {
@@ -8258,7 +9885,7 @@ public final class FilterProtos {
         this.mergeUnknownFields(other.getUnknownFields());
         return this;
       }
-
+
       public final boolean isInitialized() {
         if (!hasCompareFilter()) {
@@ -8270,52 +9897,39 @@ public final class FilterProtos {
         }
         return true;
       }
-
+
       public Builder mergeFrom(
           com.google.protobuf.CodedInputStream input,
           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
           throws java.io.IOException {
-        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder(
-            this.getUnknownFields());
-        while (true) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              this.setUnknownFields(unknownFields.build());
-              onChanged();
-              return this;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                this.setUnknownFields(unknownFields.build());
-                onChanged();
-                return this;
-              }
-              break;
-            }
-            case 10: {
-              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.newBuilder();
-              if (hasCompareFilter()) {
-                subBuilder.mergeFrom(getCompareFilter());
-              }
-              input.readMessage(subBuilder, extensionRegistry);
-              setCompareFilter(subBuilder.buildPartial());
-              break;
-            }
+        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
          }
        }
+        return this;
      }
-
      private int bitField0_;
-
+
      // required .CompareFilter compareFilter = 1;
      private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> compareFilterBuilder_;
+      /**
+       * required .CompareFilter compareFilter = 1;
+       */
      public boolean hasCompareFilter() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
+      /**
+       * required .CompareFilter compareFilter = 1;
+       */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() {
        if (compareFilterBuilder_ == null) {
          return compareFilter_;
@@ -8323,6 +9937,9 @@ public final class FilterProtos {
          return compareFilterBuilder_.getMessage();
        }
      }
+      /**
+       * required .CompareFilter compareFilter = 1;
+       */
      public Builder setCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) {
        if (compareFilterBuilder_ == null) {
          if (value == null) {
@@ -8336,6 +9953,9 @@ public final class FilterProtos {
        bitField0_ |= 0x00000001;
        return this;
      }
+      /**
+       * required .CompareFilter compareFilter = 1;
+       */
      public Builder setCompareFilter(
          org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder builderForValue) {
        if (compareFilterBuilder_ == null) {
@@ -8347,6 +9967,9 @@ public final class FilterProtos {
        bitField0_ |= 0x00000001;
        return this;
      }
+      /**
+       * required .CompareFilter compareFilter = 1;
+       */
      public Builder mergeCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) {
        if (compareFilterBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
@@ -8363,6 +9986,9 @@ public final class FilterProtos {
        bitField0_ |= 0x00000001;
        return this;
      }
+      /**
+       * required .CompareFilter compareFilter = 1;
+       */
      public Builder clearCompareFilter() {
        if (compareFilterBuilder_ == null) {
          compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
@@ -8373,11 +9999,17 @@ public final class FilterProtos {
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
+      /**
+       * required .CompareFilter compareFilter = 1;
+       */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder getCompareFilterBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getCompareFilterFieldBuilder().getBuilder();
      }
+      /**
+       * required .CompareFilter compareFilter = 1;
+       */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() {
        if (compareFilterBuilder_ != null) {
          return compareFilterBuilder_.getMessageOrBuilder();
@@ -8385,6 +10017,9 @@ public final class FilterProtos {
          return compareFilter_;
        }
      }
+      /**
+       * required .CompareFilter compareFilter = 1;
+       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder>
          getCompareFilterFieldBuilder() {
@@ -8398,64 +10033,143 @@ public final class FilterProtos {
        }
        return compareFilterBuilder_;
      }
-
+
      // @@protoc_insertion_point(builder_scope:QualifierFilter)
    }
-
+
    static {
      defaultInstance = new QualifierFilter(true);
      defaultInstance.initFields();
    }
-
+
    // @@protoc_insertion_point(class_scope:QualifierFilter)
  }
-
+
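The hunks above are typical of this regeneration: the hand-rolled tag loop in Builder.mergeFrom(CodedInputStream, ExtensionRegistryLite) is replaced by delegation to the message's static PARSER, and a failed parse now surfaces the partially decoded message through InvalidProtocolBufferException.getUnfinishedMessage(). A minimal caller-side sketch, assuming the regenerated FilterProtos is on the classpath; the helper name parseLeniently and the recover-what-was-read policy are illustrative, not part of the patch:

    import com.google.protobuf.CodedInputStream;
    import com.google.protobuf.ExtensionRegistryLite;
    import com.google.protobuf.InvalidProtocolBufferException;
    import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;

    public class ParseLenientlySketch {
      // Mirrors the new Builder.mergeFrom code path: parse via PARSER and,
      // if the stream is corrupt, keep whatever fields were decoded first.
      static FilterProtos.QualifierFilter parseLeniently(byte[] bytes) {
        try {
          return FilterProtos.QualifierFilter.PARSER.parsePartialFrom(
              CodedInputStream.newInstance(bytes),
              ExtensionRegistryLite.getEmptyRegistry());
        } catch (InvalidProtocolBufferException e) {
          // protobuf 2.5 attaches the unfinished message to the exception;
          // it may be null if parsing failed before any field was read.
          return (FilterProtos.QualifierFilter) e.getUnfinishedMessage();
        }
      }
    }

Note that parsePartialFrom, unlike parseFrom, does not throw when a required field such as compareFilter is missing.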
  public interface RandomRowFilterOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
-
+
    // required float chance = 1;
+    /**
+     * required float chance = 1;
+     */
    boolean hasChance();
+    /**
+     * required float chance = 1;
+     */
    float getChance();
  }
+  /**
+   * Protobuf type {@code RandomRowFilter}
+   */
  public static final class RandomRowFilter extends
      com.google.protobuf.GeneratedMessage
      implements RandomRowFilterOrBuilder {
    // Use RandomRowFilter.newBuilder() to construct.
-    private RandomRowFilter(Builder builder) {
+    private RandomRowFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
+      this.unknownFields = builder.getUnknownFields();
    }
-    private RandomRowFilter(boolean noInit) {}
-
+    private RandomRowFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
    private static final RandomRowFilter defaultInstance;
    public static RandomRowFilter getDefaultInstance() {
      return defaultInstance;
    }
-
+
    public RandomRowFilter getDefaultInstanceForType() {
      return defaultInstance;
    }
-
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private RandomRowFilter(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 13: {
+              bitField0_ |= 0x00000001;
+              chance_ = input.readFloat();
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_RandomRowFilter_descriptor;
    }
-
+
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_RandomRowFilter_fieldAccessorTable;
+      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_RandomRowFilter_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter.Builder.class);
    }
-
+
+    public static com.google.protobuf.Parser<RandomRowFilter> PARSER =
+        new com.google.protobuf.AbstractParser<RandomRowFilter>() {
+      public RandomRowFilter parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new RandomRowFilter(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<RandomRowFilter> getParserForType() {
+      return PARSER;
+    }
+
    private int bitField0_;
    // required float chance = 1;
    public static final int CHANCE_FIELD_NUMBER = 1;
    private float chance_;
+    /**
+     * required float chance = 1;
+     */
    public boolean hasChance() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
+    /**
+     * required float chance = 1;
+     */
    public float getChance() {
      return chance_;
    }
-
+
    private void initFields() {
      chance_ = 0F;
    }
@@ -8463,7 +10177,7 @@ public final class FilterProtos {
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
-
+
      if (!hasChance()) {
        memoizedIsInitialized = 0;
        return false;
@@ -8471,7 +10185,7 @@ public final class FilterProtos {
      memoizedIsInitialized = 1;
      return true;
    }
-
+
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
@@ -8480,12 +10194,12 @@ public final class FilterProtos {
      }
      getUnknownFields().writeTo(output);
    }
-
+
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
-
+
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
@@ -8495,14 +10209,14 @@ public final class FilterProtos {
      }
      memoizedSerializedSize = size;
      return size;
    }
-
+
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
-
+
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
@@ -8512,7 +10226,7 @@ public final class FilterProtos {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter) obj;
-
+
      boolean result = true;
      result = result && (hasChance() == other.hasChance());
      if (hasChance()) {
@@ -8522,9 +10236,13 @@ public final class FilterProtos {
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
-
+
+    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasChance()) {
@@ -8533,89 +10251,79 @@ public final class FilterProtos {
            getChance());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
      return hash;
    }
-
+
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseFrom(java.io.InputStream input)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
    }
-
+
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
-
+
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
+    /**
+     * Protobuf type {@code RandomRowFilter}
+     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilterOrBuilder {
@@ -8623,18 +10331,21 @@ public final class FilterProtos {
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_RandomRowFilter_descriptor;
      }
-
+
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_RandomRowFilter_fieldAccessorTable;
+        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_RandomRowFilter_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter.Builder.class);
      }
-
+
      // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
-
-      private Builder(BuilderParent parent) {
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
@@ -8645,27 +10356,27 @@ public final class FilterProtos {
      private static Builder create() {
        return new Builder();
      }
-
+
      public Builder clear() {
        super.clear();
        chance_ = 0F;
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
-
+
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
-
+
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter.getDescriptor();
+        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_RandomRowFilter_descriptor;
      }
-
+
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter.getDefaultInstance();
      }
-
+
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter build() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter result = buildPartial();
        if (!result.isInitialized()) {
@@ -8673,17 +10384,7 @@ public final class FilterProtos {
        }
        return result;
      }
-
-      private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter buildParsed()
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(
-            result).asInvalidProtocolBufferException();
-        }
-        return result;
-      }
-
+
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter(this);
        int from_bitField0_ = bitField0_;
@@ -8696,7 +10397,7 @@ public final class FilterProtos {
        onBuilt();
        return result;
      }
-
+
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter)other);
@@ -8705,7 +10406,7 @@ public final class FilterProtos {
          return this;
        }
      }
-
+
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter.getDefaultInstance()) return this;
        if (other.hasChance()) {
@@ -8714,131 +10415,221 @@ public final class FilterProtos {
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
-
+
      public final boolean isInitialized() {
        if (!hasChance()) {
          return false;
        }
-        return true;
-      }
-
-      public Builder mergeFrom(
-          com.google.protobuf.CodedInputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws java.io.IOException {
-        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder(
-            this.getUnknownFields());
-        while (true) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              this.setUnknownFields(unknownFields.build());
-              onChanged();
-              return this;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                this.setUnknownFields(unknownFields.build());
-                onChanged();
-                return this;
-              }
-              break;
-            }
-            case 13: {
-              bitField0_ |= 0x00000001;
-              chance_ = input.readFloat();
-              break;
-            }
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
          }
        }
+        return this;
      }
-
      private int bitField0_;
-
+
      // required float chance = 1;
      private float chance_ ;
+      /**
+       * required float chance = 1;
+       */
      public boolean hasChance() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
+      /**
+       * required float chance = 1;
+       */
      public float getChance() {
        return chance_;
      }
+      /**
+       * required float chance = 1;
+       */
      public Builder setChance(float value) {
        bitField0_ |= 0x00000001;
        chance_ = value;
        onChanged();
        return this;
      }
+      /**
+       * required float chance = 1;
+       */
      public Builder clearChance() {
        bitField0_ = (bitField0_ & ~0x00000001);
        chance_ = 0F;
        onChanged();
        return this;
      }
-
+
      // @@protoc_insertion_point(builder_scope:RandomRowFilter)
    }
-
+
    static {
      defaultInstance = new RandomRowFilter(true);
      defaultInstance.initFields();
    }
-
+
    // @@protoc_insertion_point(class_scope:RandomRowFilter)
  }
-
+
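RandomRowFilter shows the other half of the pattern: parsing moves out of the builder and into a private RandomRowFilter(CodedInputStream, ExtensionRegistryLite) constructor exposed through the static PARSER, so every static parseFrom/parseDelimitedFrom overload collapses to a one-line delegation. The delimited variants keep the old contract of returning null on a clean end-of-stream, so existing read loops still work. A small sketch, assuming the messages were written back-to-back with writeDelimitedTo; the method name sumChances is illustrative:

    import java.io.IOException;
    import java.io.InputStream;
    import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;

    public class DelimitedReadSketch {
      // Reads consecutive length-delimited RandomRowFilter messages;
      // parseDelimitedFrom returns null once the stream is exhausted.
      static float sumChances(InputStream in) throws IOException {
        float total = 0f;
        FilterProtos.RandomRowFilter msg;
        while ((msg = FilterProtos.RandomRowFilter.parseDelimitedFrom(in)) != null) {
          if (msg.hasChance()) {
            total += msg.getChance();
          }
        }
        return total;
      }
    }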
  public interface RowFilterOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
-
+
    // required .CompareFilter compareFilter = 1;
+    /**
+     * required .CompareFilter compareFilter = 1;
+     */
    boolean hasCompareFilter();
+    /**
+     * required .CompareFilter compareFilter = 1;
+     */
    org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter();
+    /**
+     * required .CompareFilter compareFilter = 1;
+     */
    org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder();
  }
+  /**
+   * Protobuf type {@code RowFilter}
+   */
  public static final class RowFilter extends
      com.google.protobuf.GeneratedMessage
      implements RowFilterOrBuilder {
    // Use RowFilter.newBuilder() to construct.
-    private RowFilter(Builder builder) {
+    private RowFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
+      this.unknownFields = builder.getUnknownFields();
    }
-    private RowFilter(boolean noInit) {}
-
+    private RowFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
    private static final RowFilter defaultInstance;
    public static RowFilter getDefaultInstance() {
      return defaultInstance;
    }
-
+
    public RowFilter getDefaultInstanceForType() {
      return defaultInstance;
    }
-
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private RowFilter(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 10: {
+              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder subBuilder = null;
+              if (((bitField0_ & 0x00000001) == 0x00000001)) {
+                subBuilder = compareFilter_.toBuilder();
+              }
+              compareFilter_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.PARSER, extensionRegistry);
+              if (subBuilder != null) {
+                subBuilder.mergeFrom(compareFilter_);
+                compareFilter_ = subBuilder.buildPartial();
+              }
+              bitField0_ |= 0x00000001;
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_RowFilter_descriptor;
    }
-
+
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_RowFilter_fieldAccessorTable;
+      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_RowFilter_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<RowFilter> PARSER =
+        new com.google.protobuf.AbstractParser<RowFilter>() {
+      public RowFilter parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new RowFilter(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<RowFilter> getParserForType() {
+      return PARSER;
    }
-
+
    private int bitField0_;
    // required .CompareFilter compareFilter = 1;
    public static final int COMPAREFILTER_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_;
+    /**
+     * required .CompareFilter compareFilter = 1;
+     */
    public boolean hasCompareFilter() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
+    /**
+     * required .CompareFilter compareFilter = 1;
+     */
    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() {
      return compareFilter_;
    }
+    /**
+     * required .CompareFilter compareFilter = 1;
+     */
    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() {
      return compareFilter_;
    }
-
+
    private void initFields() {
      compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
    }
@@ -8846,7 +10637,7 @@ public final class FilterProtos {
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
-
+
      if (!hasCompareFilter()) {
        memoizedIsInitialized = 0;
        return false;
@@ -8858,7 +10649,7 @@ public final class FilterProtos {
      memoizedIsInitialized = 1;
      return true;
    }
-
+
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
@@ -8867,12 +10658,12 @@ public final class FilterProtos {
      }
      getUnknownFields().writeTo(output);
    }
-
+
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
-
+
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
@@ -8882,14 +10673,14 @@ public final class FilterProtos {
      }
      memoizedSerializedSize = size;
      return size;
    }
-
+
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
-
+
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
@@ -8899,7 +10690,7 @@ public final class FilterProtos {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter) obj;
-
+
      boolean result = true;
      result = result && (hasCompareFilter() == other.hasCompareFilter());
      if (hasCompareFilter()) {
@@ -8910,9 +10701,13 @@ public final class FilterProtos {
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
-
+
+    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasCompareFilter()) {
@@ -8920,89 +10715,79 @@ public final class FilterProtos {
        hash = (53 * hash) + getCompareFilter().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
      return hash;
    }
-
+
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseFrom(java.io.InputStream input)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
    }
-
+
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
-
+
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
+    /**
+     * Protobuf type {@code RowFilter}
+     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilterOrBuilder {
@@ -9010,18 +10795,21 @@ public final class FilterProtos {
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_RowFilter_descriptor;
      }
-
+
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_RowFilter_fieldAccessorTable;
+        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_RowFilter_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter.Builder.class);
      }
-
+
      // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
-
-      private Builder(BuilderParent parent) {
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
@@ -9033,7 +10821,7 @@ public final class FilterProtos {
      private static Builder create() {
        return new Builder();
      }
-
+
      public Builder clear() {
        super.clear();
        if (compareFilterBuilder_ == null) {
@@ -9044,20 +10832,20 @@ public final class FilterProtos {
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
-
+
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
-
+
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter.getDescriptor();
+        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_RowFilter_descriptor;
      }
-
+
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter.getDefaultInstance();
      }
-
+
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter build() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter result = buildPartial();
        if (!result.isInitialized()) {
@@ -9065,17 +10853,7 @@ public final class FilterProtos {
        }
        return result;
      }
-
-      private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter buildParsed()
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(
-            result).asInvalidProtocolBufferException();
-        }
-        return result;
-      }
-
+
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter(this);
        int from_bitField0_ = bitField0_;
@@ -9092,7 +10870,7 @@ public final class FilterProtos {
        onBuilt();
        return result;
      }
-
+
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter)other);
@@ -9101,7 +10879,7 @@ public final class FilterProtos {
          return this;
        }
      }
-
+
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter.getDefaultInstance()) return this;
        if (other.hasCompareFilter()) {
@@ -9110,7 +10888,7 @@ public final class FilterProtos {
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
-
+
      public final boolean isInitialized() {
        if (!hasCompareFilter()) {
@@ -9122,52 +10900,39 @@ public final class FilterProtos {
        }
        return true;
      }
-
+
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
-        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder(
-            this.getUnknownFields());
-        while (true) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              this.setUnknownFields(unknownFields.build());
-              onChanged();
-              return this;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                this.setUnknownFields(unknownFields.build());
-                onChanged();
-                return this;
-              }
-              break;
-            }
-            case 10: {
-              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.newBuilder();
-              if (hasCompareFilter()) {
-                subBuilder.mergeFrom(getCompareFilter());
-              }
-              input.readMessage(subBuilder, extensionRegistry);
-              setCompareFilter(subBuilder.buildPartial());
-              break;
-            }
+        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
          }
        }
+        return this;
      }
-
      private int bitField0_;
-
+
      // required .CompareFilter compareFilter = 1;
      private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> compareFilterBuilder_;
+      /**
+       * required .CompareFilter compareFilter = 1;
+       */
      public boolean hasCompareFilter() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
+      /**
+       * required .CompareFilter compareFilter = 1;
+       */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() {
        if (compareFilterBuilder_ == null) {
          return compareFilter_;
@@ -9175,6 +10940,9 @@ public final class FilterProtos {
          return compareFilterBuilder_.getMessage();
        }
      }
+      /**
+       * required .CompareFilter compareFilter = 1;
+       */
      public Builder setCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) {
        if (compareFilterBuilder_ == null) {
          if (value == null) {
@@ -9188,6 +10956,9 @@ public final class FilterProtos {
        bitField0_ |= 0x00000001;
        return this;
      }
+      /**
+       * required .CompareFilter compareFilter = 1;
+       */
      public Builder setCompareFilter(
          org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder builderForValue) {
        if (compareFilterBuilder_ == null) {
@@ -9199,6 +10970,9 @@ public final class FilterProtos {
        bitField0_ |= 0x00000001;
        return this;
      }
+      /**
+       * required .CompareFilter compareFilter = 1;
+       */
      public Builder mergeCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) {
        if (compareFilterBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
@@ -9215,6 +10989,9 @@ public final class FilterProtos {
        bitField0_ |= 0x00000001;
        return this;
      }
+      /**
+       * required .CompareFilter compareFilter = 1;
+       */
      public Builder clearCompareFilter() {
        if (compareFilterBuilder_ == null) {
          compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
@@ -9225,11 +11002,17 @@ public final class FilterProtos {
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
+      /**
+       * required .CompareFilter compareFilter = 1;
+       */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder getCompareFilterBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getCompareFilterFieldBuilder().getBuilder();
      }
+      /**
+       * required .CompareFilter compareFilter = 1;
+       */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() {
        if (compareFilterBuilder_ != null) {
          return compareFilterBuilder_.getMessageOrBuilder();
@@ -9237,6 +11020,9 @@ public final class FilterProtos {
          return compareFilter_;
        }
      }
+      /**
+       * required .CompareFilter compareFilter = 1;
+       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder>
          getCompareFilterFieldBuilder() {
@@ -9250,68 +11036,161 @@ public final class FilterProtos {
        }
        return compareFilterBuilder_;
      }
-
+
      // @@protoc_insertion_point(builder_scope:RowFilter)
    }
-
+
    static {
      defaultInstance = new RowFilter(true);
      defaultInstance.initFields();
    }
-
+
    // @@protoc_insertion_point(class_scope:RowFilter)
  }
-
+
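In RowFilter the regenerated stream constructor keeps the protobuf rule that a repeated occurrence of an embedded-message field merges into, rather than replaces, the value read so far, now implemented with toBuilder()/mergeFrom()/buildPartial() on the already-parsed value. A sketch of that merge step in isolation; the helper name mergeDuplicate is illustrative:

    import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;

    public class NestedMergeSketch {
      // What the generated RowFilter(CodedInputStream, ...) constructor does
      // when it sees a second compareFilter value (wire tag 10) on the stream.
      static FilterProtos.CompareFilter mergeDuplicate(
          FilterProtos.CompareFilter previous, FilterProtos.CompareFilter next) {
        FilterProtos.CompareFilter.Builder merged = previous.toBuilder();
        merged.mergeFrom(next);
        // buildPartial: required fields may legitimately still be unset here.
        return merged.buildPartial();
      }
    }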
- private SingleColumnValueExcludeFilter(Builder builder) { + private SingleColumnValueExcludeFilter(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private SingleColumnValueExcludeFilter(boolean noInit) {} - + private SingleColumnValueExcludeFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final SingleColumnValueExcludeFilter defaultInstance; public static SingleColumnValueExcludeFilter getDefaultInstance() { return defaultInstance; } - + public SingleColumnValueExcludeFilter getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private SingleColumnValueExcludeFilter( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = singleColumnValueFilter_.toBuilder(); + } + singleColumnValueFilter_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(singleColumnValueFilter_); + singleColumnValueFilter_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SingleColumnValueExcludeFilter_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SingleColumnValueExcludeFilter_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SingleColumnValueExcludeFilter_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public SingleColumnValueExcludeFilter parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return new SingleColumnValueExcludeFilter(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required .SingleColumnValueFilter singleColumnValueFilter = 1; public static final int SINGLECOLUMNVALUEFILTER_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter singleColumnValueFilter_; + /** + * required .SingleColumnValueFilter singleColumnValueFilter = 1; + */ public boolean hasSingleColumnValueFilter() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .SingleColumnValueFilter singleColumnValueFilter = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter getSingleColumnValueFilter() { return singleColumnValueFilter_; } + /** + * required .SingleColumnValueFilter singleColumnValueFilter = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilterOrBuilder getSingleColumnValueFilterOrBuilder() { return singleColumnValueFilter_; } - + private void initFields() { singleColumnValueFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDefaultInstance(); } @@ -9319,7 +11198,7 @@ public final class FilterProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasSingleColumnValueFilter()) { memoizedIsInitialized = 0; return false; @@ -9331,7 +11210,7 @@ public final class FilterProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -9340,12 +11219,12 @@ public final class FilterProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -9355,14 +11234,14 @@ public final class FilterProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -9372,7 +11251,7 @@ public final class FilterProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter) obj; - + boolean result = true; result = result && (hasSingleColumnValueFilter() == other.hasSingleColumnValueFilter()); if (hasSingleColumnValueFilter()) { @@ -9383,9 +11262,13 @@ public final class FilterProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasSingleColumnValueFilter()) { @@ -9393,89 +11276,79 @@ public final class FilterProtos { hash = (53 * hash) + getSingleColumnValueFilter().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode 
= hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder 
newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code SingleColumnValueExcludeFilter} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilterOrBuilder { @@ -9483,18 +11356,21 @@ public final class FilterProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SingleColumnValueExcludeFilter_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SingleColumnValueExcludeFilter_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SingleColumnValueExcludeFilter_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -9506,7 +11382,7 @@ public final class FilterProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (singleColumnValueFilterBuilder_ == null) { @@ -9517,20 +11393,20 @@ public final class FilterProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SingleColumnValueExcludeFilter_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter build() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter result = buildPartial(); if (!result.isInitialized()) { @@ -9538,17 +11414,7 @@ public final class FilterProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - 
result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter buildPartial() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter(this); int from_bitField0_ = bitField0_; @@ -9565,7 +11431,7 @@ public final class FilterProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter)other); @@ -9574,7 +11440,7 @@ public final class FilterProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter other) { if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter.getDefaultInstance()) return this; if (other.hasSingleColumnValueFilter()) { @@ -9583,7 +11449,7 @@ public final class FilterProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasSingleColumnValueFilter()) { @@ -9595,52 +11461,39 @@ public final class FilterProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.newBuilder(); - if (hasSingleColumnValueFilter()) { - subBuilder.mergeFrom(getSingleColumnValueFilter()); - } - input.readMessage(subBuilder, extensionRegistry); - setSingleColumnValueFilter(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .SingleColumnValueFilter singleColumnValueFilter = 1; private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter singleColumnValueFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder, 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilterOrBuilder> singleColumnValueFilterBuilder_; + /** + * required .SingleColumnValueFilter singleColumnValueFilter = 1; + */ public boolean hasSingleColumnValueFilter() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .SingleColumnValueFilter singleColumnValueFilter = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter getSingleColumnValueFilter() { if (singleColumnValueFilterBuilder_ == null) { return singleColumnValueFilter_; @@ -9648,6 +11501,9 @@ public final class FilterProtos { return singleColumnValueFilterBuilder_.getMessage(); } } + /** + * required .SingleColumnValueFilter singleColumnValueFilter = 1; + */ public Builder setSingleColumnValueFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter value) { if (singleColumnValueFilterBuilder_ == null) { if (value == null) { @@ -9661,6 +11517,9 @@ public final class FilterProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .SingleColumnValueFilter singleColumnValueFilter = 1; + */ public Builder setSingleColumnValueFilter( org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder builderForValue) { if (singleColumnValueFilterBuilder_ == null) { @@ -9672,6 +11531,9 @@ public final class FilterProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .SingleColumnValueFilter singleColumnValueFilter = 1; + */ public Builder mergeSingleColumnValueFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter value) { if (singleColumnValueFilterBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -9688,6 +11550,9 @@ public final class FilterProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .SingleColumnValueFilter singleColumnValueFilter = 1; + */ public Builder clearSingleColumnValueFilter() { if (singleColumnValueFilterBuilder_ == null) { singleColumnValueFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDefaultInstance(); @@ -9698,11 +11563,17 @@ public final class FilterProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * required .SingleColumnValueFilter singleColumnValueFilter = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder getSingleColumnValueFilterBuilder() { bitField0_ |= 0x00000001; onChanged(); return getSingleColumnValueFilterFieldBuilder().getBuilder(); } + /** + * required .SingleColumnValueFilter singleColumnValueFilter = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilterOrBuilder getSingleColumnValueFilterOrBuilder() { if (singleColumnValueFilterBuilder_ != null) { return singleColumnValueFilterBuilder_.getMessageOrBuilder(); @@ -9710,6 +11581,9 @@ public final class FilterProtos { return singleColumnValueFilter_; } } + /** + * required .SingleColumnValueFilter singleColumnValueFilter = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilterOrBuilder> getSingleColumnValueFilterFieldBuilder() { @@ -9723,138 +11597,322 @@ public final class FilterProtos { } return singleColumnValueFilterBuilder_; } - + // 
@@protoc_insertion_point(builder_scope:SingleColumnValueExcludeFilter) } - + static { defaultInstance = new SingleColumnValueExcludeFilter(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:SingleColumnValueExcludeFilter) } - + public interface SingleColumnValueFilterOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // optional bytes columnFamily = 1; + /** + * optional bytes columnFamily = 1; + */ boolean hasColumnFamily(); + /** + * optional bytes columnFamily = 1; + */ com.google.protobuf.ByteString getColumnFamily(); - + // optional bytes columnQualifier = 2; + /** + * optional bytes columnQualifier = 2; + */ boolean hasColumnQualifier(); + /** + * optional bytes columnQualifier = 2; + */ com.google.protobuf.ByteString getColumnQualifier(); - + // required .CompareType compareOp = 3; + /** + * required .CompareType compareOp = 3; + */ boolean hasCompareOp(); + /** + * required .CompareType compareOp = 3; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareOp(); - + // required .Comparator comparator = 4; + /** + * required .Comparator comparator = 4; + */ boolean hasComparator(); + /** + * required .Comparator comparator = 4; + */ org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator(); + /** + * required .Comparator comparator = 4; + */ org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder(); - + // optional bool filterIfMissing = 5; + /** + * optional bool filterIfMissing = 5; + */ boolean hasFilterIfMissing(); + /** + * optional bool filterIfMissing = 5; + */ boolean getFilterIfMissing(); - + // optional bool latestVersionOnly = 6; + /** + * optional bool latestVersionOnly = 6; + */ boolean hasLatestVersionOnly(); + /** + * optional bool latestVersionOnly = 6; + */ boolean getLatestVersionOnly(); } + /** + * Protobuf type {@code SingleColumnValueFilter} + */ public static final class SingleColumnValueFilter extends com.google.protobuf.GeneratedMessage implements SingleColumnValueFilterOrBuilder { // Use SingleColumnValueFilter.newBuilder() to construct. 
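(Editorial aside, not part of the patch.) The same mechanical change repeats for every message in this file: the protobuf 2.4 builder-based parse path, newBuilder().mergeFrom(...).buildParsed(), is replaced by a static PARSER backed by a new stream-reading constructor. A minimal caller-side sketch of what that regeneration means, assuming protobuf-java 2.5 and the HBase client jars on the classpath; ParserMigrationSketch and decode are illustrative names only:

import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;

class ParserMigrationSketch {
  // `bytes` is assumed to hold a valid serialized SingleColumnValueExcludeFilter.
  static FilterProtos.SingleColumnValueExcludeFilter decode(byte[] bytes)
      throws InvalidProtocolBufferException {
    // Protobuf 2.5 path introduced by this patch: one parse, no Builder round-trip.
    return FilterProtos.SingleColumnValueExcludeFilter.PARSER.parseFrom(bytes);
    // Pre-patch equivalent built on the generated 2.4 code:
    // return FilterProtos.SingleColumnValueExcludeFilter.newBuilder().mergeFrom(bytes).build();
  }
}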
- private SingleColumnValueFilter(Builder builder) { + private SingleColumnValueFilter(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private SingleColumnValueFilter(boolean noInit) {} - + private SingleColumnValueFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final SingleColumnValueFilter defaultInstance; public static SingleColumnValueFilter getDefaultInstance() { return defaultInstance; } - + public SingleColumnValueFilter getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private SingleColumnValueFilter( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + columnFamily_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + columnQualifier_ = input.readBytes(); + break; + } + case 24: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType value = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(3, rawValue); + } else { + bitField0_ |= 0x00000004; + compareOp_ = value; + } + break; + } + case 34: { + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder subBuilder = null; + if (((bitField0_ & 0x00000008) == 0x00000008)) { + subBuilder = comparator_.toBuilder(); + } + comparator_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(comparator_); + comparator_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000008; + break; + } + case 40: { + bitField0_ |= 0x00000010; + filterIfMissing_ = input.readBool(); + break; + } + case 48: { + bitField0_ |= 0x00000020; + latestVersionOnly_ = input.readBool(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SingleColumnValueFilter_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SingleColumnValueFilter_fieldAccessorTable; + return 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SingleColumnValueFilter_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder.class); + } + + public static com.google.protobuf.Parser<SingleColumnValueFilter> PARSER = + new com.google.protobuf.AbstractParser<SingleColumnValueFilter>() { + public SingleColumnValueFilter parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new SingleColumnValueFilter(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser<SingleColumnValueFilter> getParserForType() { + return PARSER; } - + private int bitField0_; // optional bytes columnFamily = 1; public static final int COLUMNFAMILY_FIELD_NUMBER = 1; private com.google.protobuf.ByteString columnFamily_; + /** + * optional bytes columnFamily = 1; + */ public boolean hasColumnFamily() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional bytes columnFamily = 1; + */ public com.google.protobuf.ByteString getColumnFamily() { return columnFamily_; } - + // optional bytes columnQualifier = 2; public static final int COLUMNQUALIFIER_FIELD_NUMBER = 2; private com.google.protobuf.ByteString columnQualifier_; + /** + * optional bytes columnQualifier = 2; + */ public boolean hasColumnQualifier() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional bytes columnQualifier = 2; + */ public com.google.protobuf.ByteString getColumnQualifier() { return columnQualifier_; } - + // required .CompareType compareOp = 3; public static final int COMPAREOP_FIELD_NUMBER = 3; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType compareOp_; + /** + * required .CompareType compareOp = 3; + */ public boolean hasCompareOp() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * required .CompareType compareOp = 3; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareOp() { return compareOp_; } - + // required .Comparator comparator = 4; public static final int COMPARATOR_FIELD_NUMBER = 4; private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator comparator_; + /** + * required .Comparator comparator = 4; + */ public boolean hasComparator() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * required .Comparator comparator = 4; + */ public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator() { return comparator_; } + /** + * required .Comparator comparator = 4; + */ public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder() { return comparator_; } - + // optional bool filterIfMissing = 5; public static final int FILTERIFMISSING_FIELD_NUMBER = 5; private boolean filterIfMissing_; + /** + * optional bool filterIfMissing = 5; + */ public boolean hasFilterIfMissing() { return ((bitField0_ & 0x00000010) == 0x00000010); } + /** + * optional bool filterIfMissing = 5; + */ public boolean getFilterIfMissing() { return filterIfMissing_; } - + // optional bool latestVersionOnly = 6; public static final int LATESTVERSIONONLY_FIELD_NUMBER = 6; private boolean latestVersionOnly_; + /** + * optional bool latestVersionOnly = 6; + */ public boolean hasLatestVersionOnly() { return ((bitField0_ & 0x00000020) == 0x00000020); } + 
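(Editorial aside, not part of the patch.) Besides the static PARSER field, each regenerated message now overrides getParserForType(), so generic code can recover the right parser from any message instance without naming the concrete type. A hedged sketch of that use, assuming protobuf-java 2.5; ReparseSketch and reparse are illustrative names only:

import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.Message;

class ReparseSketch {
  // Round-trips any generated message through its own PARSER, the object this
  // patch introduces for each filter type. The unchecked cast is safe because
  // getParserForType() parses back the same concrete message type.
  @SuppressWarnings("unchecked")
  static <M extends Message> M reparse(M msg) throws InvalidProtocolBufferException {
    return (M) msg.getParserForType().parseFrom(msg.toByteArray());
  }
}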
/** + * optional bool latestVersionOnly = 6; + */ public boolean getLatestVersionOnly() { return latestVersionOnly_; } - + private void initFields() { columnFamily_ = com.google.protobuf.ByteString.EMPTY; columnQualifier_ = com.google.protobuf.ByteString.EMPTY; @@ -9867,7 +11925,7 @@ public final class FilterProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasCompareOp()) { memoizedIsInitialized = 0; return false; @@ -9883,7 +11941,7 @@ public final class FilterProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -9907,12 +11965,12 @@ public final class FilterProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -9942,14 +12000,14 @@ public final class FilterProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -9959,7 +12017,7 @@ public final class FilterProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter) obj; - + boolean result = true; result = result && (hasColumnFamily() == other.hasColumnFamily()); if (hasColumnFamily()) { @@ -9995,9 +12053,13 @@ public final class FilterProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasColumnFamily()) { @@ -10025,89 +12087,79 @@ public final class FilterProtos { hash = (53 * hash) + hashBoolean(getLatestVersionOnly()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code SingleColumnValueFilter} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilterOrBuilder { @@ -10115,18 +12167,21 @@ public final class FilterProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SingleColumnValueFilter_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SingleColumnValueFilter_fieldAccessorTable; + return 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SingleColumnValueFilter_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -10138,7 +12193,7 @@ public final class FilterProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); columnFamily_ = com.google.protobuf.ByteString.EMPTY; @@ -10159,20 +12214,20 @@ public final class FilterProtos { bitField0_ = (bitField0_ & ~0x00000020); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SingleColumnValueFilter_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter build() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter result = buildPartial(); if (!result.isInitialized()) { @@ -10180,17 +12235,7 @@ public final class FilterProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter buildPartial() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter(this); int from_bitField0_ = bitField0_; @@ -10227,7 +12272,7 @@ public final class FilterProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter)other); @@ -10236,7 +12281,7 @@ public final class FilterProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter other) { if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDefaultInstance()) return this; if (other.hasColumnFamily()) { @@ -10260,7 +12305,7 @@ public final class FilterProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean 
isInitialized() { if (!hasCompareOp()) { @@ -10276,84 +12321,43 @@ public final class FilterProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - columnFamily_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - columnQualifier_ = input.readBytes(); - break; - } - case 24: { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType value = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(3, rawValue); - } else { - bitField0_ |= 0x00000004; - compareOp_ = value; - } - break; - } - case 34: { - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.newBuilder(); - if (hasComparator()) { - subBuilder.mergeFrom(getComparator()); - } - input.readMessage(subBuilder, extensionRegistry); - setComparator(subBuilder.buildPartial()); - break; - } - case 40: { - bitField0_ |= 0x00000010; - filterIfMissing_ = input.readBool(); - break; - } - case 48: { - bitField0_ |= 0x00000020; - latestVersionOnly_ = input.readBool(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // optional bytes columnFamily = 1; private com.google.protobuf.ByteString columnFamily_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes columnFamily = 1; + */ public boolean hasColumnFamily() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional bytes columnFamily = 1; + */ public com.google.protobuf.ByteString getColumnFamily() { return columnFamily_; } + /** + * optional bytes columnFamily = 1; + */ public Builder setColumnFamily(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -10363,21 +12367,33 @@ public final class FilterProtos { onChanged(); return this; } + /** + * optional bytes columnFamily = 1; + */ public Builder clearColumnFamily() { bitField0_ = (bitField0_ & ~0x00000001); columnFamily_ = getDefaultInstance().getColumnFamily(); onChanged(); return this; } - + // optional bytes columnQualifier = 2; private com.google.protobuf.ByteString columnQualifier_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes columnQualifier = 2; + */ public boolean hasColumnQualifier() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional bytes 
columnQualifier = 2; + */ public com.google.protobuf.ByteString getColumnQualifier() { return columnQualifier_; } + /** + * optional bytes columnQualifier = 2; + */ public Builder setColumnQualifier(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -10387,21 +12403,33 @@ public final class FilterProtos { onChanged(); return this; } + /** + * optional bytes columnQualifier = 2; + */ public Builder clearColumnQualifier() { bitField0_ = (bitField0_ & ~0x00000002); columnQualifier_ = getDefaultInstance().getColumnQualifier(); onChanged(); return this; } - + // required .CompareType compareOp = 3; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType compareOp_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS; + /** + * required .CompareType compareOp = 3; + */ public boolean hasCompareOp() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * required .CompareType compareOp = 3; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareOp() { return compareOp_; } + /** + * required .CompareType compareOp = 3; + */ public Builder setCompareOp(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType value) { if (value == null) { throw new NullPointerException(); @@ -10411,20 +12439,29 @@ public final class FilterProtos { onChanged(); return this; } + /** + * required .CompareType compareOp = 3; + */ public Builder clearCompareOp() { bitField0_ = (bitField0_ & ~0x00000004); compareOp_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS; onChanged(); return this; } - + // required .Comparator comparator = 4; private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder> comparatorBuilder_; + /** + * required .Comparator comparator = 4; + */ public boolean hasComparator() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * required .Comparator comparator = 4; + */ public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator() { if (comparatorBuilder_ == null) { return comparator_; @@ -10432,6 +12469,9 @@ public final class FilterProtos { return comparatorBuilder_.getMessage(); } } + /** + * required .Comparator comparator = 4; + */ public Builder setComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator value) { if (comparatorBuilder_ == null) { if (value == null) { @@ -10445,6 +12485,9 @@ public final class FilterProtos { bitField0_ |= 0x00000008; return this; } + /** + * required .Comparator comparator = 4; + */ public Builder setComparator( org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder builderForValue) { if (comparatorBuilder_ == null) { @@ -10456,6 +12499,9 @@ public final class FilterProtos { bitField0_ |= 0x00000008; return this; } + /** + * required .Comparator comparator = 4; + */ public Builder mergeComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator value) { if (comparatorBuilder_ == null) { if (((bitField0_ & 0x00000008) == 0x00000008) && @@ -10472,6 +12518,9 @@ public final class FilterProtos { 
bitField0_ |= 0x00000008; return this; } + /** + * required .Comparator comparator = 4; + */ public Builder clearComparator() { if (comparatorBuilder_ == null) { comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); @@ -10482,11 +12531,17 @@ public final class FilterProtos { bitField0_ = (bitField0_ & ~0x00000008); return this; } + /** + * required .Comparator comparator = 4; + */ public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder getComparatorBuilder() { bitField0_ |= 0x00000008; onChanged(); return getComparatorFieldBuilder().getBuilder(); } + /** + * required .Comparator comparator = 4; + */ public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder() { if (comparatorBuilder_ != null) { return comparatorBuilder_.getMessageOrBuilder(); @@ -10494,6 +12549,9 @@ public final class FilterProtos { return comparator_; } } + /** + * required .Comparator comparator = 4; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder> getComparatorFieldBuilder() { @@ -10507,110 +12565,227 @@ public final class FilterProtos { } return comparatorBuilder_; } - + // optional bool filterIfMissing = 5; private boolean filterIfMissing_ ; + /** + * optional bool filterIfMissing = 5; + */ public boolean hasFilterIfMissing() { return ((bitField0_ & 0x00000010) == 0x00000010); } + /** + * optional bool filterIfMissing = 5; + */ public boolean getFilterIfMissing() { return filterIfMissing_; } + /** + * optional bool filterIfMissing = 5; + */ public Builder setFilterIfMissing(boolean value) { bitField0_ |= 0x00000010; filterIfMissing_ = value; onChanged(); return this; } + /** + * optional bool filterIfMissing = 5; + */ public Builder clearFilterIfMissing() { bitField0_ = (bitField0_ & ~0x00000010); filterIfMissing_ = false; onChanged(); return this; } - + // optional bool latestVersionOnly = 6; private boolean latestVersionOnly_ ; + /** + * optional bool latestVersionOnly = 6; + */ public boolean hasLatestVersionOnly() { return ((bitField0_ & 0x00000020) == 0x00000020); } + /** + * optional bool latestVersionOnly = 6; + */ public boolean getLatestVersionOnly() { return latestVersionOnly_; } + /** + * optional bool latestVersionOnly = 6; + */ public Builder setLatestVersionOnly(boolean value) { bitField0_ |= 0x00000020; latestVersionOnly_ = value; onChanged(); return this; } + /** + * optional bool latestVersionOnly = 6; + */ public Builder clearLatestVersionOnly() { bitField0_ = (bitField0_ & ~0x00000020); latestVersionOnly_ = false; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:SingleColumnValueFilter) } - + static { defaultInstance = new SingleColumnValueFilter(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:SingleColumnValueFilter) } - + public interface SkipFilterOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .Filter filter = 1; + /** + * required .Filter filter = 1; + */ boolean hasFilter(); + /** + * required .Filter filter = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilter(); + /** + * required .Filter filter = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFilterOrBuilder(); } + /** + * Protobuf 
type {@code SkipFilter} + */ public static final class SkipFilter extends com.google.protobuf.GeneratedMessage implements SkipFilterOrBuilder { // Use SkipFilter.newBuilder() to construct. - private SkipFilter(Builder builder) { + private SkipFilter(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private SkipFilter(boolean noInit) {} - + private SkipFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final SkipFilter defaultInstance; public static SkipFilter getDefaultInstance() { return defaultInstance; } - + public SkipFilter getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private SkipFilter( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = filter_.toBuilder(); + } + filter_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(filter_); + filter_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SkipFilter_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SkipFilter_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SkipFilter_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter.Builder.class); + } + + public static com.google.protobuf.Parser<SkipFilter> PARSER = + new com.google.protobuf.AbstractParser<SkipFilter>() { + public SkipFilter parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new SkipFilter(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser<SkipFilter> getParserForType() { + return PARSER; } - + private int 
bitField0_; // required .Filter filter = 1; public static final int FILTER_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter filter_; + /** + * required .Filter filter = 1; + */ public boolean hasFilter() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .Filter filter = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilter() { return filter_; } + /** + * required .Filter filter = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFilterOrBuilder() { return filter_; } - + private void initFields() { filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); } @@ -10618,7 +12793,7 @@ public final class FilterProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasFilter()) { memoizedIsInitialized = 0; return false; @@ -10630,7 +12805,7 @@ public final class FilterProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -10639,12 +12814,12 @@ public final class FilterProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -10654,14 +12829,14 @@ public final class FilterProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -10671,7 +12846,7 @@ public final class FilterProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter) obj; - + boolean result = true; result = result && (hasFilter() == other.hasFilter()); if (hasFilter()) { @@ -10682,9 +12857,13 @@ public final class FilterProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasFilter()) { @@ -10692,89 +12871,79 @@ public final class FilterProtos { hash = (53 * hash) + getFilter().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code SkipFilter} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilterOrBuilder { @@ -10782,18 +12951,21 @@ public final class FilterProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SkipFilter_descriptor; } - + protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SkipFilter_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SkipFilter_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -10805,7 +12977,7 @@ public final class FilterProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (filterBuilder_ == null) { @@ -10816,20 +12988,20 @@ public final class FilterProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SkipFilter_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter build() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter result = buildPartial(); if (!result.isInitialized()) { @@ -10837,17 +13009,7 @@ public final class FilterProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter buildPartial() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter(this); int from_bitField0_ = bitField0_; @@ -10864,7 +13026,7 @@ public final class FilterProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter)other); @@ -10873,7 +13035,7 @@ public final class FilterProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter other) { if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter.getDefaultInstance()) return this; if (other.hasFilter()) { @@ -10882,7 +13044,7 @@ public final class FilterProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasFilter()) { @@ -10894,52 +13056,39 @@ 
public final class FilterProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.newBuilder(); - if (hasFilter()) { - subBuilder.mergeFrom(getFilter()); - } - input.readMessage(subBuilder, extensionRegistry); - setFilter(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .Filter filter = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder> filterBuilder_; + /** + * required .Filter filter = 1; + */ public boolean hasFilter() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .Filter filter = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilter() { if (filterBuilder_ == null) { return filter_; @@ -10947,6 +13096,9 @@ public final class FilterProtos { return filterBuilder_.getMessage(); } } + /** + * required .Filter filter = 1; + */ public Builder setFilter(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter value) { if (filterBuilder_ == null) { if (value == null) { @@ -10960,6 +13112,9 @@ public final class FilterProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .Filter filter = 1; + */ public Builder setFilter( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder builderForValue) { if (filterBuilder_ == null) { @@ -10971,6 +13126,9 @@ public final class FilterProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .Filter filter = 1; + */ public Builder mergeFilter(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter value) { if (filterBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -10987,6 +13145,9 @@ public final class FilterProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .Filter filter = 1; + */ public Builder clearFilter() { if (filterBuilder_ == null) { filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); @@ -10997,11 +13158,17 @@ public final class FilterProtos { bitField0_ = 
(bitField0_ & ~0x00000001); return this; } + /** + * required .Filter filter = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder getFilterBuilder() { bitField0_ |= 0x00000001; onChanged(); return getFilterFieldBuilder().getBuilder(); } + /** + * required .Filter filter = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFilterOrBuilder() { if (filterBuilder_ != null) { return filterBuilder_.getMessageOrBuilder(); @@ -11009,6 +13176,9 @@ public final class FilterProtos { return filter_; } } + /** + * required .Filter filter = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder> getFilterFieldBuilder() { @@ -11022,81 +13192,185 @@ public final class FilterProtos { } return filterBuilder_; } - + // @@protoc_insertion_point(builder_scope:SkipFilter) } - + static { defaultInstance = new SkipFilter(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:SkipFilter) } - + public interface TimestampsFilterOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // repeated int64 timestamps = 1 [packed = true]; + /** + * repeated int64 timestamps = 1 [packed = true]; + */ java.util.List getTimestampsList(); + /** + * repeated int64 timestamps = 1 [packed = true]; + */ int getTimestampsCount(); + /** + * repeated int64 timestamps = 1 [packed = true]; + */ long getTimestamps(int index); } + /** + * Protobuf type {@code TimestampsFilter} + */ public static final class TimestampsFilter extends com.google.protobuf.GeneratedMessage implements TimestampsFilterOrBuilder { // Use TimestampsFilter.newBuilder() to construct. 
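(Editorial aside, not part of the patch.) The regenerated hashCode(), visible in the SkipFilter hunks above and again for TimestampsFilter below, now caches its result in a memoizedHashCode field; that is safe because a built message is immutable. A standalone sketch of the idiom, with HashMemoSketch and value as illustrative stand-ins:

class HashMemoSketch {
  private final long value = 42L; // stand-in for the message's immutable fields
  private int memoizedHashCode;   // 0 means "not computed yet"

  @Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;    // benign data race: recomputation is idempotent
    }
    int hash = 41;
    hash = (53 * hash) + Long.valueOf(value).hashCode();
    memoizedHashCode = hash;      // if the hash happens to be 0 it is simply recomputed
    return hash;
  }
}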
- private TimestampsFilter(Builder builder) { + private TimestampsFilter(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private TimestampsFilter(boolean noInit) {} - + private TimestampsFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final TimestampsFilter defaultInstance; public static TimestampsFilter getDefaultInstance() { return defaultInstance; } - + public TimestampsFilter getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private TimestampsFilter( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + timestamps_ = new java.util.ArrayList<java.lang.Long>(); + mutable_bitField0_ |= 0x00000001; + } + timestamps_.add(input.readInt64()); + break; + } + case 10: { + int length = input.readRawVarint32(); + int limit = input.pushLimit(length); + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001) && input.getBytesUntilLimit() > 0) { + timestamps_ = new java.util.ArrayList<java.lang.Long>(); + mutable_bitField0_ |= 0x00000001; + } + while (input.getBytesUntilLimit() > 0) { + timestamps_.add(input.readInt64()); + } + input.popLimit(limit); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + timestamps_ = java.util.Collections.unmodifiableList(timestamps_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_TimestampsFilter_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_TimestampsFilter_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_TimestampsFilter_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter.Builder.class); + } + + public static com.google.protobuf.Parser<TimestampsFilter> PARSER = + new com.google.protobuf.AbstractParser<TimestampsFilter>() { + public TimestampsFilter parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return 
new TimestampsFilter(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser<TimestampsFilter> getParserForType() { + return PARSER; } - + // repeated int64 timestamps = 1 [packed = true]; public static final int TIMESTAMPS_FIELD_NUMBER = 1; private java.util.List<java.lang.Long> timestamps_; + /** + * repeated int64 timestamps = 1 [packed = true]; + */ public java.util.List<java.lang.Long> getTimestampsList() { return timestamps_; } + /** + * repeated int64 timestamps = 1 [packed = true]; + */ public int getTimestampsCount() { return timestamps_.size(); } + /** + * repeated int64 timestamps = 1 [packed = true]; + */ public long getTimestamps(int index) { return timestamps_.get(index); } private int timestampsMemoizedSerializedSize = -1; - + private void initFields() { - timestamps_ = java.util.Collections.emptyList();; + timestamps_ = java.util.Collections.emptyList(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -11109,12 +13383,12 @@ public final class FilterProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; { int dataSize = 0; @@ -11134,14 +13408,14 @@ public final class FilterProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -11151,7 +13425,7 @@ public final class FilterProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter) obj; - + boolean result = true; result = result && getTimestampsList() .equals(other.getTimestampsList()); @@ -11159,9 +13433,13 @@ public final class FilterProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (getTimestampsCount() > 0) { @@ -11169,89 +13447,79 @@ public final class FilterProtos { hash = (53 * hash) + getTimestampsList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code TimestampsFilter} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilterOrBuilder { @@ -11259,18 +13527,21 @@ public final class FilterProtos { getDescriptor() { return 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_TimestampsFilter_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_TimestampsFilter_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_TimestampsFilter_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -11281,27 +13552,27 @@ public final class FilterProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); - timestamps_ = java.util.Collections.emptyList();; + timestamps_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_TimestampsFilter_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter build() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter result = buildPartial(); if (!result.isInitialized()) { @@ -11309,17 +13580,7 @@ public final class FilterProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter buildPartial() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter(this); int from_bitField0_ = bitField0_; @@ -11331,7 +13592,7 @@ public final class FilterProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter)other); @@ -11340,7 +13601,7 @@ public final class FilterProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter other) { if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter.getDefaultInstance()) 
return this; if (!other.timestamps_.isEmpty()) { @@ -11356,72 +13617,60 @@ public final class FilterProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - ensureTimestampsIsMutable(); - timestamps_.add(input.readInt64()); - break; - } - case 10: { - int length = input.readRawVarint32(); - int limit = input.pushLimit(length); - while (input.getBytesUntilLimit() > 0) { - addTimestamps(input.readInt64()); - } - input.popLimit(limit); - break; - } + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // repeated int64 timestamps = 1 [packed = true]; - private java.util.List timestamps_ = java.util.Collections.emptyList();; + private java.util.List timestamps_ = java.util.Collections.emptyList(); private void ensureTimestampsIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { timestamps_ = new java.util.ArrayList(timestamps_); bitField0_ |= 0x00000001; } } + /** + * repeated int64 timestamps = 1 [packed = true]; + */ public java.util.List getTimestampsList() { return java.util.Collections.unmodifiableList(timestamps_); } + /** + * repeated int64 timestamps = 1 [packed = true]; + */ public int getTimestampsCount() { return timestamps_.size(); } + /** + * repeated int64 timestamps = 1 [packed = true]; + */ public long getTimestamps(int index) { return timestamps_.get(index); } + /** + * repeated int64 timestamps = 1 [packed = true]; + */ public Builder setTimestamps( int index, long value) { ensureTimestampsIsMutable(); @@ -11429,12 +13678,18 @@ public final class FilterProtos { onChanged(); return this; } + /** + * repeated int64 timestamps = 1 [packed = true]; + */ public Builder addTimestamps(long value) { ensureTimestampsIsMutable(); timestamps_.add(value); onChanged(); return this; } + /** + * repeated int64 timestamps = 1 [packed = true]; + */ public Builder addAllTimestamps( java.lang.Iterable values) { ensureTimestampsIsMutable(); @@ -11442,74 +13697,170 @@ public final class FilterProtos { onChanged(); return this; } + /** + * repeated int64 timestamps = 1 [packed = true]; + */ public Builder clearTimestamps() { - timestamps_ = java.util.Collections.emptyList();; + timestamps_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:TimestampsFilter) } - + static { defaultInstance = new TimestampsFilter(true); 
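A note on the case 8 / case 10 pair in the new parsing constructor above: timestamps is declared "repeated int64 timestamps = 1 [packed = true]", so a conforming writer emits one length-delimited run (wire tag 10), while the generated parser also accepts one varint per element (wire tag 8) for compatibility with non-packed writers. A minimal round trip through the new PARSER, as a sketch that assumes only the regenerated FilterProtos class on the classpath:

    import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;

    public class TimestampsRoundTrip {
      public static void main(String[] args) throws Exception {
        FilterProtos.TimestampsFilter filter = FilterProtos.TimestampsFilter.newBuilder()
            .addTimestamps(1L)
            .addTimestamps(2L)
            .build();
        byte[] bytes = filter.toByteArray();  // serialized as a single tag-10 packed run
        // PARSER.parseFrom replaces the old newBuilder().mergeFrom(bytes).build() idiom.
        FilterProtos.TimestampsFilter copy = FilterProtos.TimestampsFilter.PARSER.parseFrom(bytes);
        System.out.println(copy.getTimestampsList());  // [1, 2]
      }
    }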
defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:TimestampsFilter) } - + public interface ValueFilterOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .CompareFilter compareFilter = 1; + /** + * required .CompareFilter compareFilter = 1; + */ boolean hasCompareFilter(); + /** + * required .CompareFilter compareFilter = 1; + */ org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter(); + /** + * required .CompareFilter compareFilter = 1; + */ org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder(); } + /** + * Protobuf type {@code ValueFilter} + */ public static final class ValueFilter extends com.google.protobuf.GeneratedMessage implements ValueFilterOrBuilder { // Use ValueFilter.newBuilder() to construct. - private ValueFilter(Builder builder) { + private ValueFilter(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private ValueFilter(boolean noInit) {} - + private ValueFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final ValueFilter defaultInstance; public static ValueFilter getDefaultInstance() { return defaultInstance; } - + public ValueFilter getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ValueFilter( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = compareFilter_.toBuilder(); + } + compareFilter_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(compareFilter_); + compareFilter_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ValueFilter_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ValueFilter_fieldAccessorTable; + return 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ValueFilter_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter.Builder.class); } - + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public ValueFilter parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ValueFilter(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + private int bitField0_; // required .CompareFilter compareFilter = 1; public static final int COMPAREFILTER_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_; + /** + * required .CompareFilter compareFilter = 1; + */ public boolean hasCompareFilter() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .CompareFilter compareFilter = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() { return compareFilter_; } + /** + * required .CompareFilter compareFilter = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() { return compareFilter_; } - + private void initFields() { compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); } @@ -11517,7 +13868,7 @@ public final class FilterProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasCompareFilter()) { memoizedIsInitialized = 0; return false; @@ -11529,7 +13880,7 @@ public final class FilterProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -11538,12 +13889,12 @@ public final class FilterProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -11553,14 +13904,14 @@ public final class FilterProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -11570,7 +13921,7 @@ public final class FilterProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter) obj; - + boolean result = true; result = result && (hasCompareFilter() == other.hasCompareFilter()); if (hasCompareFilter()) { @@ -11581,9 +13932,13 @@ public final class FilterProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * 
hash) + getDescriptorForType().hashCode(); if (hasCompareFilter()) { @@ -11591,89 +13946,79 @@ public final class FilterProtos { hash = (53 * hash) + getCompareFilter().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder 
newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code ValueFilter} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilterOrBuilder { @@ -11681,18 +14026,21 @@ public final class FilterProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ValueFilter_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ValueFilter_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ValueFilter_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -11704,7 +14052,7 @@ public final class FilterProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (compareFilterBuilder_ == null) { @@ -11715,20 +14063,20 @@ public final class FilterProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ValueFilter_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter build() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter result = buildPartial(); if (!result.isInitialized()) { @@ -11736,17 +14084,7 @@ public final class FilterProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter buildPartial() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter result = new 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter(this); int from_bitField0_ = bitField0_; @@ -11763,7 +14101,7 @@ public final class FilterProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter)other); @@ -11772,7 +14110,7 @@ public final class FilterProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter other) { if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter.getDefaultInstance()) return this; if (other.hasCompareFilter()) { @@ -11781,7 +14119,7 @@ public final class FilterProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasCompareFilter()) { @@ -11793,52 +14131,39 @@ public final class FilterProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.newBuilder(); - if (hasCompareFilter()) { - subBuilder.mergeFrom(getCompareFilter()); - } - input.readMessage(subBuilder, extensionRegistry); - setCompareFilter(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .CompareFilter compareFilter = 1; private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> compareFilterBuilder_; + /** + * required .CompareFilter compareFilter = 1; + */ public boolean hasCompareFilter() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .CompareFilter compareFilter = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() { if (compareFilterBuilder_ == null) { return compareFilter_; @@ -11846,6 +14171,9 @@ public final class FilterProtos { return compareFilterBuilder_.getMessage(); } } + 
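The compareFilter field here is backed by a lazily created SingleFieldBuilder: setCompareFilter with a ready message stores it directly, while getCompareFilterBuilder switches the field into builder-backed mode. Because the field is required, the generated build() refuses an unset compareFilter, whereas buildPartial() permits it. A short sketch of that contract, using only methods visible in this diff:

    FilterProtos.ValueFilter.Builder vb = FilterProtos.ValueFilter.newBuilder();
    boolean ok = vb.isInitialized();                      // false: required compareFilter is unset
    FilterProtos.ValueFilter partial = vb.buildPartial(); // allowed, carries the missing field
    // vb.build() here would fail via newUninitializedMessageException(result)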
/** + * required .CompareFilter compareFilter = 1; + */ public Builder setCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) { if (compareFilterBuilder_ == null) { if (value == null) { @@ -11859,6 +14187,9 @@ public final class FilterProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .CompareFilter compareFilter = 1; + */ public Builder setCompareFilter( org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder builderForValue) { if (compareFilterBuilder_ == null) { @@ -11870,6 +14201,9 @@ public final class FilterProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .CompareFilter compareFilter = 1; + */ public Builder mergeCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) { if (compareFilterBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -11886,6 +14220,9 @@ public final class FilterProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .CompareFilter compareFilter = 1; + */ public Builder clearCompareFilter() { if (compareFilterBuilder_ == null) { compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); @@ -11896,11 +14233,17 @@ public final class FilterProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * required .CompareFilter compareFilter = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder getCompareFilterBuilder() { bitField0_ |= 0x00000001; onChanged(); return getCompareFilterFieldBuilder().getBuilder(); } + /** + * required .CompareFilter compareFilter = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() { if (compareFilterBuilder_ != null) { return compareFilterBuilder_.getMessageOrBuilder(); @@ -11908,6 +14251,9 @@ public final class FilterProtos { return compareFilter_; } } + /** + * required .CompareFilter compareFilter = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> getCompareFilterFieldBuilder() { @@ -11921,68 +14267,161 @@ public final class FilterProtos { } return compareFilterBuilder_; } - + // @@protoc_insertion_point(builder_scope:ValueFilter) } - + static { defaultInstance = new ValueFilter(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:ValueFilter) } - + public interface WhileMatchFilterOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .Filter filter = 1; + /** + * required .Filter filter = 1; + */ boolean hasFilter(); + /** + * required .Filter filter = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilter(); + /** + * required .Filter filter = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFilterOrBuilder(); } + /** + * Protobuf type {@code WhileMatchFilter} + */ public static final class WhileMatchFilter extends com.google.protobuf.GeneratedMessage implements WhileMatchFilterOrBuilder { // Use WhileMatchFilter.newBuilder() to construct. 
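WhileMatchFilter below gets the same regenerated shape: a private parsing constructor, a static PARSER, and a Builder.mergeFrom(CodedInputStream, ...) that now delegates to PARSER.parsePartialFrom and re-merges the partial message on failure. Callers can reach that partial state through the getUnfinishedMessage() hook that this regeneration starts using on InvalidProtocolBufferException; a sketch with an assumed-corrupt input:

    byte[] corrupt = { 0x0A, (byte) 0xFF };  // tag 10 followed by a truncated varint length
    try {
      FilterProtos.ValueFilter.PARSER.parseFrom(corrupt);
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      // Whatever was decoded before the failure was attached by setUnfinishedMessage(this).
      com.google.protobuf.MessageLite partial = e.getUnfinishedMessage();
    }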
- private WhileMatchFilter(Builder builder) { + private WhileMatchFilter(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private WhileMatchFilter(boolean noInit) {} - + private WhileMatchFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final WhileMatchFilter defaultInstance; public static WhileMatchFilter getDefaultInstance() { return defaultInstance; } - + public WhileMatchFilter getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private WhileMatchFilter( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = filter_.toBuilder(); + } + filter_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(filter_); + filter_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_WhileMatchFilter_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_WhileMatchFilter_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_WhileMatchFilter_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public WhileMatchFilter parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new WhileMatchFilter(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required .Filter filter = 1; public static final int FILTER_FIELD_NUMBER = 1; 
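Because every regenerated message now overrides getParserForType() (as WhileMatchFilter does just above), generic code can reparse any of these filters without reflecting on static methods. A small helper of that shape, a sketch rather than anything in HBase itself:

    static <T extends com.google.protobuf.Message> T reparse(T prototype, byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      @SuppressWarnings("unchecked")
      com.google.protobuf.Parser<T> parser =
          (com.google.protobuf.Parser<T>) prototype.getParserForType();
      return parser.parseFrom(data);
    }
    // e.g. reparse(FilterProtos.WhileMatchFilter.getDefaultInstance(), bytes)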
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter filter_; + /** + * required .Filter filter = 1; + */ public boolean hasFilter() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .Filter filter = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilter() { return filter_; } + /** + * required .Filter filter = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFilterOrBuilder() { return filter_; } - + private void initFields() { filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); } @@ -11990,7 +14429,7 @@ public final class FilterProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasFilter()) { memoizedIsInitialized = 0; return false; @@ -12002,7 +14441,7 @@ public final class FilterProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -12011,12 +14450,12 @@ public final class FilterProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -12026,14 +14465,14 @@ public final class FilterProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -12043,7 +14482,7 @@ public final class FilterProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter) obj; - + boolean result = true; result = result && (hasFilter() == other.hasFilter()); if (hasFilter()) { @@ -12054,9 +14493,13 @@ public final class FilterProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasFilter()) { @@ -12064,89 +14507,79 @@ public final class FilterProtos { hash = (53 * hash) + getFilter().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom(byte[] data) 
throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code WhileMatchFilter} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilterOrBuilder { @@ -12154,18 +14587,21 @@ public final class FilterProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_WhileMatchFilter_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable 
internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_WhileMatchFilter_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_WhileMatchFilter_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -12177,7 +14613,7 @@ public final class FilterProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (filterBuilder_ == null) { @@ -12188,20 +14624,20 @@ public final class FilterProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_WhileMatchFilter_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter build() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter result = buildPartial(); if (!result.isInitialized()) { @@ -12209,17 +14645,7 @@ public final class FilterProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter buildPartial() { org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter(this); int from_bitField0_ = bitField0_; @@ -12236,7 +14662,7 @@ public final class FilterProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter)other); @@ -12245,7 +14671,7 @@ public final class FilterProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter other) { if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter.getDefaultInstance()) return this; if (other.hasFilter()) { @@ -12254,7 +14680,7 @@ public final class FilterProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean 
isInitialized() { if (!hasFilter()) { @@ -12266,52 +14692,39 @@ public final class FilterProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.newBuilder(); - if (hasFilter()) { - subBuilder.mergeFrom(getFilter()); - } - input.readMessage(subBuilder, extensionRegistry); - setFilter(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .Filter filter = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder> filterBuilder_; + /** + * required .Filter filter = 1; + */ public boolean hasFilter() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .Filter filter = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilter() { if (filterBuilder_ == null) { return filter_; @@ -12319,6 +14732,9 @@ public final class FilterProtos { return filterBuilder_.getMessage(); } } + /** + * required .Filter filter = 1; + */ public Builder setFilter(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter value) { if (filterBuilder_ == null) { if (value == null) { @@ -12332,6 +14748,9 @@ public final class FilterProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .Filter filter = 1; + */ public Builder setFilter( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder builderForValue) { if (filterBuilder_ == null) { @@ -12343,6 +14762,9 @@ public final class FilterProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .Filter filter = 1; + */ public Builder mergeFilter(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter value) { if (filterBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -12359,6 +14781,9 @@ public final class FilterProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .Filter filter = 1; + */ public Builder clearFilter() { if (filterBuilder_ == null) { filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); @@ 
-12369,11 +14794,17 @@ public final class FilterProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * required .Filter filter = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder getFilterBuilder() { bitField0_ |= 0x00000001; onChanged(); return getFilterFieldBuilder().getBuilder(); } + /** + * required .Filter filter = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFilterOrBuilder() { if (filterBuilder_ != null) { return filterBuilder_.getMessageOrBuilder(); @@ -12381,6 +14812,9 @@ public final class FilterProtos { return filter_; } } + /** + * required .Filter filter = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder> getFilterFieldBuilder() { @@ -12394,18 +14828,18 @@ public final class FilterProtos { } return filterBuilder_; } - + // @@protoc_insertion_point(builder_scope:WhileMatchFilter) } - + static { defaultInstance = new WhileMatchFilter(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:WhileMatchFilter) } - + private static com.google.protobuf.Descriptors.Descriptor internal_static_ColumnCountGetFilter_descriptor; private static @@ -12536,7 +14970,7 @@ public final class FilterProtos { private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_WhileMatchFilter_fieldAccessorTable; - + public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; @@ -12601,209 +15035,157 @@ public final class FilterProtos { internal_static_ColumnCountGetFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ColumnCountGetFilter_descriptor, - new java.lang.String[] { "Limit", }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.Builder.class); + new java.lang.String[] { "Limit", }); internal_static_ColumnPaginationFilter_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_ColumnPaginationFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ColumnPaginationFilter_descriptor, - new java.lang.String[] { "Limit", "Offset", }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.Builder.class); + new java.lang.String[] { "Limit", "Offset", }); internal_static_ColumnPrefixFilter_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_ColumnPrefixFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ColumnPrefixFilter_descriptor, - new java.lang.String[] { "Prefix", }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.Builder.class); + new java.lang.String[] { "Prefix", }); internal_static_ColumnRangeFilter_descriptor = getDescriptor().getMessageTypes().get(3); internal_static_ColumnRangeFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ColumnRangeFilter_descriptor, - new java.lang.String[] { "MinColumn", "MinColumnInclusive", 
"MaxColumn", "MaxColumnInclusive", }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter.Builder.class); + new java.lang.String[] { "MinColumn", "MinColumnInclusive", "MaxColumn", "MaxColumnInclusive", }); internal_static_CompareFilter_descriptor = getDescriptor().getMessageTypes().get(4); internal_static_CompareFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_CompareFilter_descriptor, - new java.lang.String[] { "CompareOp", "Comparator", }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder.class); + new java.lang.String[] { "CompareOp", "Comparator", }); internal_static_DependentColumnFilter_descriptor = getDescriptor().getMessageTypes().get(5); internal_static_DependentColumnFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_DependentColumnFilter_descriptor, - new java.lang.String[] { "CompareFilter", "ColumnFamily", "ColumnQualifier", "DropDependentColumn", }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter.Builder.class); + new java.lang.String[] { "CompareFilter", "ColumnFamily", "ColumnQualifier", "DropDependentColumn", }); internal_static_FamilyFilter_descriptor = getDescriptor().getMessageTypes().get(6); internal_static_FamilyFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_FamilyFilter_descriptor, - new java.lang.String[] { "CompareFilter", }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter.Builder.class); + new java.lang.String[] { "CompareFilter", }); internal_static_FilterList_descriptor = getDescriptor().getMessageTypes().get(7); internal_static_FilterList_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_FilterList_descriptor, - new java.lang.String[] { "Operator", "Filters", }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Builder.class); + new java.lang.String[] { "Operator", "Filters", }); internal_static_FilterWrapper_descriptor = getDescriptor().getMessageTypes().get(8); internal_static_FilterWrapper_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_FilterWrapper_descriptor, - new java.lang.String[] { "Filter", }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper.Builder.class); + new java.lang.String[] { "Filter", }); internal_static_FirstKeyOnlyFilter_descriptor = getDescriptor().getMessageTypes().get(9); internal_static_FirstKeyOnlyFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_FirstKeyOnlyFilter_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.Builder.class); + new java.lang.String[] { }); internal_static_FirstKeyValueMatchingQualifiersFilter_descriptor = 
getDescriptor().getMessageTypes().get(10); internal_static_FirstKeyValueMatchingQualifiersFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_FirstKeyValueMatchingQualifiersFilter_descriptor, - new java.lang.String[] { "Qualifiers", }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.Builder.class); + new java.lang.String[] { "Qualifiers", }); internal_static_FuzzyRowFilter_descriptor = getDescriptor().getMessageTypes().get(11); internal_static_FuzzyRowFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_FuzzyRowFilter_descriptor, - new java.lang.String[] { "FuzzyKeysData", }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter.Builder.class); + new java.lang.String[] { "FuzzyKeysData", }); internal_static_InclusiveStopFilter_descriptor = getDescriptor().getMessageTypes().get(12); internal_static_InclusiveStopFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_InclusiveStopFilter_descriptor, - new java.lang.String[] { "StopRowKey", }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter.Builder.class); + new java.lang.String[] { "StopRowKey", }); internal_static_KeyOnlyFilter_descriptor = getDescriptor().getMessageTypes().get(13); internal_static_KeyOnlyFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_KeyOnlyFilter_descriptor, - new java.lang.String[] { "LenAsVal", }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter.Builder.class); + new java.lang.String[] { "LenAsVal", }); internal_static_MultipleColumnPrefixFilter_descriptor = getDescriptor().getMessageTypes().get(14); internal_static_MultipleColumnPrefixFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_MultipleColumnPrefixFilter_descriptor, - new java.lang.String[] { "SortedPrefixes", }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter.Builder.class); + new java.lang.String[] { "SortedPrefixes", }); internal_static_PageFilter_descriptor = getDescriptor().getMessageTypes().get(15); internal_static_PageFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_PageFilter_descriptor, - new java.lang.String[] { "PageSize", }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter.Builder.class); + new java.lang.String[] { "PageSize", }); internal_static_PrefixFilter_descriptor = getDescriptor().getMessageTypes().get(16); internal_static_PrefixFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_PrefixFilter_descriptor, - new java.lang.String[] { "Prefix", }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter.class, - 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter.Builder.class); + new java.lang.String[] { "Prefix", }); internal_static_QualifierFilter_descriptor = getDescriptor().getMessageTypes().get(17); internal_static_QualifierFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_QualifierFilter_descriptor, - new java.lang.String[] { "CompareFilter", }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter.Builder.class); + new java.lang.String[] { "CompareFilter", }); internal_static_RandomRowFilter_descriptor = getDescriptor().getMessageTypes().get(18); internal_static_RandomRowFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_RandomRowFilter_descriptor, - new java.lang.String[] { "Chance", }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter.Builder.class); + new java.lang.String[] { "Chance", }); internal_static_RowFilter_descriptor = getDescriptor().getMessageTypes().get(19); internal_static_RowFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_RowFilter_descriptor, - new java.lang.String[] { "CompareFilter", }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter.Builder.class); + new java.lang.String[] { "CompareFilter", }); internal_static_SingleColumnValueExcludeFilter_descriptor = getDescriptor().getMessageTypes().get(20); internal_static_SingleColumnValueExcludeFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_SingleColumnValueExcludeFilter_descriptor, - new java.lang.String[] { "SingleColumnValueFilter", }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter.Builder.class); + new java.lang.String[] { "SingleColumnValueFilter", }); internal_static_SingleColumnValueFilter_descriptor = getDescriptor().getMessageTypes().get(21); internal_static_SingleColumnValueFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_SingleColumnValueFilter_descriptor, - new java.lang.String[] { "ColumnFamily", "ColumnQualifier", "CompareOp", "Comparator", "FilterIfMissing", "LatestVersionOnly", }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder.class); + new java.lang.String[] { "ColumnFamily", "ColumnQualifier", "CompareOp", "Comparator", "FilterIfMissing", "LatestVersionOnly", }); internal_static_SkipFilter_descriptor = getDescriptor().getMessageTypes().get(22); internal_static_SkipFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_SkipFilter_descriptor, - new java.lang.String[] { "Filter", }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter.Builder.class); + new java.lang.String[] { "Filter", }); internal_static_TimestampsFilter_descriptor = getDescriptor().getMessageTypes().get(23); 
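In protobuf 2.5 the FieldAccessorTable constructor no longer takes the generated message and builder classes, which is why every table construction in this static block drops its two trailing class arguments. The classes are instead bound lazily: each regenerated internalGetFieldAccessorTable() calls ensureFieldAccessorsInitialized(...) on first use, as the HBaseProtos hunks later in this patch show. Code that reads fields reflectively through the descriptors is unaffected. A minimal sketch of that reflective path, assuming protobuf 2.5.0 and the regenerated classes on the classpath (FieldAccessDemo itself is hypothetical, not part of this patch):

    import com.google.protobuf.ByteString;
    import com.google.protobuf.Descriptors;
    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell;

    // Hypothetical demo, not part of this patch.
    public class FieldAccessDemo {
      public static void main(String[] args) {
        Cell cell = Cell.newBuilder()
            .setRow(ByteString.copyFromUtf8("r1"))
            .build();
        // getField() goes through the FieldAccessorTable; under 2.5 the
        // accessors are initialized on this first reflective use instead of
        // when the descriptor assigner runs, so loading the descriptors no
        // longer forces every message class to load.
        Descriptors.FieldDescriptor row =
            cell.getDescriptorForType().findFieldByName("row");
        System.out.println(cell.getField(row));
      }
    }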
internal_static_TimestampsFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_TimestampsFilter_descriptor, - new java.lang.String[] { "Timestamps", }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter.Builder.class); + new java.lang.String[] { "Timestamps", }); internal_static_ValueFilter_descriptor = getDescriptor().getMessageTypes().get(24); internal_static_ValueFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ValueFilter_descriptor, - new java.lang.String[] { "CompareFilter", }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter.Builder.class); + new java.lang.String[] { "CompareFilter", }); internal_static_WhileMatchFilter_descriptor = getDescriptor().getMessageTypes().get(25); internal_static_WhileMatchFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_WhileMatchFilter_descriptor, - new java.lang.String[] { "Filter", }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter.Builder.class); + new java.lang.String[] { "Filter", }); return null; } }; @@ -12814,6 +15196,6 @@ public final class FilterProtos { org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.getDescriptor(), }, assigner); } - + // @@protoc_insertion_point(outer_class_scope) } diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java index 23a4f4f..b9a4b66 100644 --- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java +++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java @@ -8,26 +8,78 @@ public final class HBaseProtos { public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { } + /** + * Protobuf enum {@code CellType} + * + *
+   **
+   * The type of the key in a Cell
+   * </pre>
+ */ public enum CellType implements com.google.protobuf.ProtocolMessageEnum { + /** + * MINIMUM = 0; + */ MINIMUM(0, 0), + /** + * PUT = 4; + */ PUT(1, 4), + /** + * DELETE = 8; + */ DELETE(2, 8), + /** + * DELETE_COLUMN = 12; + */ DELETE_COLUMN(3, 12), + /** + * DELETE_FAMILY = 14; + */ DELETE_FAMILY(4, 14), + /** + * MAXIMUM = 255; + * + *
+     * MAXIMUM is used when searching; you look from maximum on down.
+     * </pre>
+ */ MAXIMUM(5, 255), ; - + + /** + * MINIMUM = 0; + */ public static final int MINIMUM_VALUE = 0; + /** + * PUT = 4; + */ public static final int PUT_VALUE = 4; + /** + * DELETE = 8; + */ public static final int DELETE_VALUE = 8; + /** + * DELETE_COLUMN = 12; + */ public static final int DELETE_COLUMN_VALUE = 12; + /** + * DELETE_FAMILY = 14; + */ public static final int DELETE_FAMILY_VALUE = 14; + /** + * MAXIMUM = 255; + * + *
+     * MAXIMUM is used when searching; you look from maximum on down.
+     * </pre>
+ */ public static final int MAXIMUM_VALUE = 255; - - + + public final int getNumber() { return value; } - + public static CellType valueOf(int value) { switch (value) { case 0: return MINIMUM; @@ -39,7 +91,7 @@ public final class HBaseProtos { default: return null; } } - + public static com.google.protobuf.Internal.EnumLiteMap internalGetValueMap() { return internalValueMap; @@ -51,7 +103,7 @@ public final class HBaseProtos { return CellType.valueOf(number); } }; - + public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(index); @@ -64,11 +116,9 @@ public final class HBaseProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor().getEnumTypes().get(0); } - - private static final CellType[] VALUES = { - MINIMUM, PUT, DELETE, DELETE_COLUMN, DELETE_FAMILY, MAXIMUM, - }; - + + private static final CellType[] VALUES = values(); + public static CellType valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { @@ -77,40 +127,89 @@ public final class HBaseProtos { } return VALUES[desc.getIndex()]; } - + private final int index; private final int value; - + private CellType(int index, int value) { this.index = index; this.value = value; } - + // @@protoc_insertion_point(enum_scope:CellType) } - + + /** + * Protobuf enum {@code CompareType} + * + *
+   * Comparison operators 
+   * </pre>
+ */ public enum CompareType implements com.google.protobuf.ProtocolMessageEnum { + /** + * LESS = 0; + */ LESS(0, 0), + /** + * LESS_OR_EQUAL = 1; + */ LESS_OR_EQUAL(1, 1), + /** + * EQUAL = 2; + */ EQUAL(2, 2), + /** + * NOT_EQUAL = 3; + */ NOT_EQUAL(3, 3), + /** + * GREATER_OR_EQUAL = 4; + */ GREATER_OR_EQUAL(4, 4), + /** + * GREATER = 5; + */ GREATER(5, 5), + /** + * NO_OP = 6; + */ NO_OP(6, 6), ; - + + /** + * LESS = 0; + */ public static final int LESS_VALUE = 0; + /** + * LESS_OR_EQUAL = 1; + */ public static final int LESS_OR_EQUAL_VALUE = 1; + /** + * EQUAL = 2; + */ public static final int EQUAL_VALUE = 2; + /** + * NOT_EQUAL = 3; + */ public static final int NOT_EQUAL_VALUE = 3; + /** + * GREATER_OR_EQUAL = 4; + */ public static final int GREATER_OR_EQUAL_VALUE = 4; + /** + * GREATER = 5; + */ public static final int GREATER_VALUE = 5; + /** + * NO_OP = 6; + */ public static final int NO_OP_VALUE = 6; - - + + public final int getNumber() { return value; } - + public static CompareType valueOf(int value) { switch (value) { case 0: return LESS; @@ -123,7 +222,7 @@ public final class HBaseProtos { default: return null; } } - + public static com.google.protobuf.Internal.EnumLiteMap internalGetValueMap() { return internalValueMap; @@ -135,7 +234,7 @@ public final class HBaseProtos { return CompareType.valueOf(number); } }; - + public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(index); @@ -148,11 +247,9 @@ public final class HBaseProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor().getEnumTypes().get(1); } - - private static final CompareType[] VALUES = { - LESS, LESS_OR_EQUAL, EQUAL, NOT_EQUAL, GREATER_OR_EQUAL, GREATER, NO_OP, - }; - + + private static final CompareType[] VALUES = values(); + public static CompareType valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { @@ -161,134 +258,309 @@ public final class HBaseProtos { } return VALUES[desc.getIndex()]; } - + private final int index; private final int value; - + private CompareType(int index, int value) { this.index = index; this.value = value; } - + // @@protoc_insertion_point(enum_scope:CompareType) } - + public interface CellOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // optional bytes row = 1; + /** + * optional bytes row = 1; + */ boolean hasRow(); + /** + * optional bytes row = 1; + */ com.google.protobuf.ByteString getRow(); - + // optional bytes family = 2; + /** + * optional bytes family = 2; + */ boolean hasFamily(); + /** + * optional bytes family = 2; + */ com.google.protobuf.ByteString getFamily(); - + // optional bytes qualifier = 3; + /** + * optional bytes qualifier = 3; + */ boolean hasQualifier(); + /** + * optional bytes qualifier = 3; + */ com.google.protobuf.ByteString getQualifier(); - + // optional uint64 timestamp = 4; + /** + * optional uint64 timestamp = 4; + */ boolean hasTimestamp(); + /** + * optional uint64 timestamp = 4; + */ long getTimestamp(); - + // optional .CellType cellType = 5; + /** + * optional .CellType cellType = 5; + */ boolean hasCellType(); + /** + * optional .CellType cellType = 5; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CellType getCellType(); - + // optional bytes value = 6; + /** + * optional bytes value = 6; + */ boolean hasValue(); + /** + * optional bytes value = 6; + */ com.google.protobuf.ByteString getValue(); } + /** + * Protobuf type {@code Cell} + * + 
* <pre>
+   **
+   * Protocol buffer version of Cell.
+   * </pre>
+ */ public static final class Cell extends com.google.protobuf.GeneratedMessage implements CellOrBuilder { // Use Cell.newBuilder() to construct. - private Cell(Builder builder) { + private Cell(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private Cell(boolean noInit) {} - + private Cell(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final Cell defaultInstance; public static Cell getDefaultInstance() { return defaultInstance; } - + public Cell getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private Cell( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + row_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + family_ = input.readBytes(); + break; + } + case 26: { + bitField0_ |= 0x00000004; + qualifier_ = input.readBytes(); + break; + } + case 32: { + bitField0_ |= 0x00000008; + timestamp_ = input.readUInt64(); + break; + } + case 40: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CellType value = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CellType.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(5, rawValue); + } else { + bitField0_ |= 0x00000010; + cellType_ = value; + } + break; + } + case 50: { + bitField0_ |= 0x00000020; + value_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_Cell_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_Cell_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_Cell_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public Cell parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return new Cell(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // optional bytes row = 1; public static final int ROW_FIELD_NUMBER = 1; private com.google.protobuf.ByteString row_; + /** + * optional bytes row = 1; + */ public boolean hasRow() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional bytes row = 1; + */ public com.google.protobuf.ByteString getRow() { return row_; } - + // optional bytes family = 2; public static final int FAMILY_FIELD_NUMBER = 2; private com.google.protobuf.ByteString family_; + /** + * optional bytes family = 2; + */ public boolean hasFamily() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional bytes family = 2; + */ public com.google.protobuf.ByteString getFamily() { return family_; } - + // optional bytes qualifier = 3; public static final int QUALIFIER_FIELD_NUMBER = 3; private com.google.protobuf.ByteString qualifier_; + /** + * optional bytes qualifier = 3; + */ public boolean hasQualifier() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional bytes qualifier = 3; + */ public com.google.protobuf.ByteString getQualifier() { return qualifier_; } - + // optional uint64 timestamp = 4; public static final int TIMESTAMP_FIELD_NUMBER = 4; private long timestamp_; + /** + * optional uint64 timestamp = 4; + */ public boolean hasTimestamp() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * optional uint64 timestamp = 4; + */ public long getTimestamp() { return timestamp_; } - + // optional .CellType cellType = 5; public static final int CELLTYPE_FIELD_NUMBER = 5; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CellType cellType_; + /** + * optional .CellType cellType = 5; + */ public boolean hasCellType() { return ((bitField0_ & 0x00000010) == 0x00000010); } + /** + * optional .CellType cellType = 5; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CellType getCellType() { return cellType_; } - + // optional bytes value = 6; public static final int VALUE_FIELD_NUMBER = 6; private com.google.protobuf.ByteString value_; + /** + * optional bytes value = 6; + */ public boolean hasValue() { return ((bitField0_ & 0x00000020) == 0x00000020); } + /** + * optional bytes value = 6; + */ public com.google.protobuf.ByteString getValue() { return value_; } - + private void initFields() { row_ = com.google.protobuf.ByteString.EMPTY; family_ = com.google.protobuf.ByteString.EMPTY; @@ -301,11 +573,11 @@ public final class HBaseProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -329,12 +601,12 @@ public final class HBaseProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -364,14 +636,14 @@ public final class HBaseProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws 
java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -381,7 +653,7 @@ public final class HBaseProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell) obj; - + boolean result = true; result = result && (hasRow() == other.hasRow()); if (hasRow()) { @@ -417,9 +689,13 @@ public final class HBaseProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasRow()) { @@ -447,89 +723,84 @@ public final class HBaseProtos { hash = (53 * hash) + getValue().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return 
PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code Cell} + * + *
+     **
+     * Protocol buffer version of Cell.
+     * </pre>
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CellOrBuilder { @@ -537,18 +808,21 @@ public final class HBaseProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_Cell_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_Cell_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_Cell_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -559,7 +833,7 @@ public final class HBaseProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); row_ = com.google.protobuf.ByteString.EMPTY; @@ -576,20 +850,20 @@ public final class HBaseProtos { bitField0_ = (bitField0_ & ~0x00000020); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_Cell_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell build() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell result = buildPartial(); if (!result.isInitialized()) { @@ -597,17 +871,7 @@ public final class HBaseProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell buildPartial() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell(this); int from_bitField0_ = bitField0_; @@ -640,7 +904,7 @@ public final class HBaseProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell)other); @@ -649,7 +913,7 @@ public final class HBaseProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell other) { if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell.getDefaultInstance()) return this; if (other.hasRow()) { @@ -673,84 
+937,47 @@ public final class HBaseProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - row_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - family_ = input.readBytes(); - break; - } - case 26: { - bitField0_ |= 0x00000004; - qualifier_ = input.readBytes(); - break; - } - case 32: { - bitField0_ |= 0x00000008; - timestamp_ = input.readUInt64(); - break; - } - case 40: { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CellType value = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CellType.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(5, rawValue); - } else { - bitField0_ |= 0x00000010; - cellType_ = value; - } - break; - } - case 50: { - bitField0_ |= 0x00000020; - value_ = input.readBytes(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // optional bytes row = 1; private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes row = 1; + */ public boolean hasRow() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional bytes row = 1; + */ public com.google.protobuf.ByteString getRow() { return row_; } + /** + * optional bytes row = 1; + */ public Builder setRow(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -760,21 +987,33 @@ public final class HBaseProtos { onChanged(); return this; } + /** + * optional bytes row = 1; + */ public Builder clearRow() { bitField0_ = (bitField0_ & ~0x00000001); row_ = getDefaultInstance().getRow(); onChanged(); return this; } - + // optional bytes family = 2; private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes family = 2; + */ public boolean hasFamily() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional bytes family = 2; + */ public com.google.protobuf.ByteString getFamily() { return family_; } + /** + * optional bytes family = 2; + */ public Builder setFamily(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -784,21 +1023,33 @@ public final class HBaseProtos { onChanged(); return this; } + /** + * optional bytes family = 2; + */ public Builder clearFamily() { bitField0_ = (bitField0_ & ~0x00000002); family_ = 
getDefaultInstance().getFamily(); onChanged(); return this; } - + // optional bytes qualifier = 3; private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes qualifier = 3; + */ public boolean hasQualifier() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional bytes qualifier = 3; + */ public com.google.protobuf.ByteString getQualifier() { return qualifier_; } + /** + * optional bytes qualifier = 3; + */ public Builder setQualifier(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -808,42 +1059,66 @@ public final class HBaseProtos { onChanged(); return this; } + /** + * optional bytes qualifier = 3; + */ public Builder clearQualifier() { bitField0_ = (bitField0_ & ~0x00000004); qualifier_ = getDefaultInstance().getQualifier(); onChanged(); return this; } - + // optional uint64 timestamp = 4; private long timestamp_ ; + /** + * optional uint64 timestamp = 4; + */ public boolean hasTimestamp() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * optional uint64 timestamp = 4; + */ public long getTimestamp() { return timestamp_; } + /** + * optional uint64 timestamp = 4; + */ public Builder setTimestamp(long value) { bitField0_ |= 0x00000008; timestamp_ = value; onChanged(); return this; } + /** + * optional uint64 timestamp = 4; + */ public Builder clearTimestamp() { bitField0_ = (bitField0_ & ~0x00000008); timestamp_ = 0L; onChanged(); return this; } - + // optional .CellType cellType = 5; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CellType cellType_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CellType.MINIMUM; + /** + * optional .CellType cellType = 5; + */ public boolean hasCellType() { return ((bitField0_ & 0x00000010) == 0x00000010); } + /** + * optional .CellType cellType = 5; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CellType getCellType() { return cellType_; } + /** + * optional .CellType cellType = 5; + */ public Builder setCellType(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CellType value) { if (value == null) { throw new NullPointerException(); @@ -853,21 +1128,33 @@ public final class HBaseProtos { onChanged(); return this; } + /** + * optional .CellType cellType = 5; + */ public Builder clearCellType() { bitField0_ = (bitField0_ & ~0x00000010); cellType_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CellType.MINIMUM; onChanged(); return this; } - + // optional bytes value = 6; private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes value = 6; + */ public boolean hasValue() { return ((bitField0_ & 0x00000020) == 0x00000020); } + /** + * optional bytes value = 6; + */ public com.google.protobuf.ByteString getValue() { return value_; } + /** + * optional bytes value = 6; + */ public Builder setValue(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -877,163 +1164,374 @@ public final class HBaseProtos { onChanged(); return this; } + /** + * optional bytes value = 6; + */ public Builder clearValue() { bitField0_ = (bitField0_ & ~0x00000020); value_ = getDefaultInstance().getValue(); onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:Cell) } - + static { defaultInstance = new Cell(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:Cell) } - + public interface TableSchemaOrBuilder extends 
com.google.protobuf.MessageOrBuilder { - + // optional bytes name = 1; + /** + * optional bytes name = 1; + */ boolean hasName(); + /** + * optional bytes name = 1; + */ com.google.protobuf.ByteString getName(); - + // repeated .BytesBytesPair attributes = 2; + /** + * repeated .BytesBytesPair attributes = 2; + */ java.util.List getAttributesList(); + /** + * repeated .BytesBytesPair attributes = 2; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getAttributes(int index); + /** + * repeated .BytesBytesPair attributes = 2; + */ int getAttributesCount(); + /** + * repeated .BytesBytesPair attributes = 2; + */ java.util.List getAttributesOrBuilderList(); + /** + * repeated .BytesBytesPair attributes = 2; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getAttributesOrBuilder( int index); - + // repeated .ColumnFamilySchema columnFamilies = 3; + /** + * repeated .ColumnFamilySchema columnFamilies = 3; + */ java.util.List getColumnFamiliesList(); + /** + * repeated .ColumnFamilySchema columnFamilies = 3; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies(int index); + /** + * repeated .ColumnFamilySchema columnFamilies = 3; + */ int getColumnFamiliesCount(); + /** + * repeated .ColumnFamilySchema columnFamilies = 3; + */ java.util.List getColumnFamiliesOrBuilderList(); + /** + * repeated .ColumnFamilySchema columnFamilies = 3; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder( int index); - + // repeated .NameStringPair configuration = 4; + /** + * repeated .NameStringPair configuration = 4; + */ java.util.List getConfigurationList(); + /** + * repeated .NameStringPair configuration = 4; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index); + /** + * repeated .NameStringPair configuration = 4; + */ int getConfigurationCount(); + /** + * repeated .NameStringPair configuration = 4; + */ java.util.List getConfigurationOrBuilderList(); + /** + * repeated .NameStringPair configuration = 4; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder( int index); } + /** + * Protobuf type {@code TableSchema} + * + *
+   **
+   * Table Schema
+   * Inspired by the rest TableSchema
+   * </pre>
+ */ public static final class TableSchema extends com.google.protobuf.GeneratedMessage implements TableSchemaOrBuilder { // Use TableSchema.newBuilder() to construct. - private TableSchema(Builder builder) { + private TableSchema(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private TableSchema(boolean noInit) {} - + private TableSchema(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final TableSchema defaultInstance; public static TableSchema getDefaultInstance() { return defaultInstance; } - + public TableSchema getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private TableSchema( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + name_ = input.readBytes(); + break; + } + case 18: { + if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + attributes_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000002; + } + attributes_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.PARSER, extensionRegistry)); + break; + } + case 26: { + if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { + columnFamilies_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000004; + } + columnFamilies_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.PARSER, extensionRegistry)); + break; + } + case 34: { + if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) { + configuration_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000008; + } + configuration_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.PARSER, extensionRegistry)); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + attributes_ = java.util.Collections.unmodifiableList(attributes_); + } + if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { + columnFamilies_ = java.util.Collections.unmodifiableList(columnFamilies_); + } + if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) { + configuration_ = java.util.Collections.unmodifiableList(configuration_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TableSchema_descriptor; } - + protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TableSchema_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TableSchema_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public TableSchema parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new TableSchema(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // optional bytes name = 1; public static final int NAME_FIELD_NUMBER = 1; private com.google.protobuf.ByteString name_; + /** + * optional bytes name = 1; + */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional bytes name = 1; + */ public com.google.protobuf.ByteString getName() { return name_; } - + // repeated .BytesBytesPair attributes = 2; public static final int ATTRIBUTES_FIELD_NUMBER = 2; private java.util.List attributes_; + /** + * repeated .BytesBytesPair attributes = 2; + */ public java.util.List getAttributesList() { return attributes_; } + /** + * repeated .BytesBytesPair attributes = 2; + */ public java.util.List getAttributesOrBuilderList() { return attributes_; } + /** + * repeated .BytesBytesPair attributes = 2; + */ public int getAttributesCount() { return attributes_.size(); } + /** + * repeated .BytesBytesPair attributes = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getAttributes(int index) { return attributes_.get(index); } + /** + * repeated .BytesBytesPair attributes = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getAttributesOrBuilder( int index) { return attributes_.get(index); } - + // repeated .ColumnFamilySchema columnFamilies = 3; public static final int COLUMNFAMILIES_FIELD_NUMBER = 3; private java.util.List columnFamilies_; + /** + * repeated .ColumnFamilySchema columnFamilies = 3; + */ public java.util.List getColumnFamiliesList() { return columnFamilies_; } + /** + * repeated .ColumnFamilySchema columnFamilies = 3; + */ public java.util.List getColumnFamiliesOrBuilderList() { return columnFamilies_; } + /** + * repeated .ColumnFamilySchema columnFamilies = 3; + */ public int getColumnFamiliesCount() { return columnFamilies_.size(); } + /** + * repeated .ColumnFamilySchema columnFamilies = 3; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies(int index) { return columnFamilies_.get(index); } + /** + * repeated .ColumnFamilySchema columnFamilies = 3; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder( int index) { return columnFamilies_.get(index); } - + // repeated .NameStringPair configuration = 4; public static final int CONFIGURATION_FIELD_NUMBER = 4; private java.util.List configuration_; + /** + * repeated .NameStringPair configuration = 4; + */ public java.util.List getConfigurationList() { return 
configuration_; } + /** + * repeated .NameStringPair configuration = 4; + */ public java.util.List getConfigurationOrBuilderList() { return configuration_; } + /** + * repeated .NameStringPair configuration = 4; + */ public int getConfigurationCount() { return configuration_.size(); } + /** + * repeated .NameStringPair configuration = 4; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index) { return configuration_.get(index); } + /** + * repeated .NameStringPair configuration = 4; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder( int index) { return configuration_.get(index); } - + private void initFields() { name_ = com.google.protobuf.ByteString.EMPTY; attributes_ = java.util.Collections.emptyList(); @@ -1044,7 +1542,7 @@ public final class HBaseProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + for (int i = 0; i < getAttributesCount(); i++) { if (!getAttributes(i).isInitialized()) { memoizedIsInitialized = 0; @@ -1066,7 +1564,7 @@ public final class HBaseProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -1084,12 +1582,12 @@ public final class HBaseProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -1111,14 +1609,14 @@ public final class HBaseProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -1128,7 +1626,7 @@ public final class HBaseProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema) obj; - + boolean result = true; result = result && (hasName() == other.hasName()); if (hasName()) { @@ -1145,9 +1643,13 @@ public final class HBaseProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasName()) { @@ -1167,89 +1669,85 @@ public final class HBaseProtos { hash = (53 * hash) + getConfigurationList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return 
newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code TableSchema} + * + *
+     **
+     * Table Schema
+     * Inspired by the rest TableSchema
+     * </pre>
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder { @@ -1257,18 +1755,21 @@ public final class HBaseProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TableSchema_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TableSchema_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TableSchema_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -1282,7 +1783,7 @@ public final class HBaseProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); name_ = com.google.protobuf.ByteString.EMPTY; @@ -1307,20 +1808,20 @@ public final class HBaseProtos { } return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TableSchema_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema build() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema result = buildPartial(); if (!result.isInitialized()) { @@ -1328,17 +1829,7 @@ public final class HBaseProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema buildPartial() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema(this); int from_bitField0_ = bitField0_; @@ -1378,7 +1869,7 @@ public final class HBaseProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema)other); @@ -1387,7 +1878,7 @@ public final class HBaseProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema other) { if (other == 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance()) return this; if (other.hasName()) { @@ -1474,7 +1965,7 @@ public final class HBaseProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { for (int i = 0; i < getAttributesCount(); i++) { if (!getAttributes(i).isInitialized()) { @@ -1496,67 +1987,43 @@ public final class HBaseProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - name_ = input.readBytes(); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addAttributes(subBuilder.buildPartial()); - break; - } - case 26: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addColumnFamilies(subBuilder.buildPartial()); - break; - } - case 34: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addConfiguration(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // optional bytes name = 1; private com.google.protobuf.ByteString name_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes name = 1; + */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional bytes name = 1; + */ public com.google.protobuf.ByteString getName() { return name_; } + /** + * optional bytes name = 1; + */ public Builder setName(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -1566,13 +2033,16 @@ public final class HBaseProtos { onChanged(); return this; } + /** + * optional bytes name = 1; + */ public Builder clearName() { bitField0_ = (bitField0_ & ~0x00000001); name_ = getDefaultInstance().getName(); onChanged(); return this; } - + // repeated .BytesBytesPair attributes = 2; private java.util.List attributes_ = java.util.Collections.emptyList(); @@ -1582,10 +2052,13 @@ public final class HBaseProtos { bitField0_ |= 0x00000002; } } - + private 
com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> attributesBuilder_; - + + /** + * repeated .BytesBytesPair attributes = 2; + */ public java.util.List getAttributesList() { if (attributesBuilder_ == null) { return java.util.Collections.unmodifiableList(attributes_); @@ -1593,6 +2066,9 @@ public final class HBaseProtos { return attributesBuilder_.getMessageList(); } } + /** + * repeated .BytesBytesPair attributes = 2; + */ public int getAttributesCount() { if (attributesBuilder_ == null) { return attributes_.size(); @@ -1600,6 +2076,9 @@ public final class HBaseProtos { return attributesBuilder_.getCount(); } } + /** + * repeated .BytesBytesPair attributes = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getAttributes(int index) { if (attributesBuilder_ == null) { return attributes_.get(index); @@ -1607,6 +2086,9 @@ public final class HBaseProtos { return attributesBuilder_.getMessage(index); } } + /** + * repeated .BytesBytesPair attributes = 2; + */ public Builder setAttributes( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) { if (attributesBuilder_ == null) { @@ -1621,6 +2103,9 @@ public final class HBaseProtos { } return this; } + /** + * repeated .BytesBytesPair attributes = 2; + */ public Builder setAttributes( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) { if (attributesBuilder_ == null) { @@ -1632,6 +2117,9 @@ public final class HBaseProtos { } return this; } + /** + * repeated .BytesBytesPair attributes = 2; + */ public Builder addAttributes(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) { if (attributesBuilder_ == null) { if (value == null) { @@ -1645,6 +2133,9 @@ public final class HBaseProtos { } return this; } + /** + * repeated .BytesBytesPair attributes = 2; + */ public Builder addAttributes( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) { if (attributesBuilder_ == null) { @@ -1659,6 +2150,9 @@ public final class HBaseProtos { } return this; } + /** + * repeated .BytesBytesPair attributes = 2; + */ public Builder addAttributes( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) { if (attributesBuilder_ == null) { @@ -1670,6 +2164,9 @@ public final class HBaseProtos { } return this; } + /** + * repeated .BytesBytesPair attributes = 2; + */ public Builder addAttributes( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) { if (attributesBuilder_ == null) { @@ -1681,6 +2178,9 @@ public final class HBaseProtos { } return this; } + /** + * repeated .BytesBytesPair attributes = 2; + */ public Builder addAllAttributes( java.lang.Iterable values) { if (attributesBuilder_ == null) { @@ -1692,6 +2192,9 @@ public final class HBaseProtos { } return this; } + /** + * repeated .BytesBytesPair attributes = 2; + */ public Builder clearAttributes() { if (attributesBuilder_ == null) { attributes_ = java.util.Collections.emptyList(); @@ -1702,6 +2205,9 @@ public final class HBaseProtos { } return this; } + /** + * repeated .BytesBytesPair attributes = 2; + */ public Builder removeAttributes(int index) { if (attributesBuilder_ == null) { ensureAttributesIsMutable(); @@ 
-1712,10 +2218,16 @@ public final class HBaseProtos { } return this; } + /** + * repeated .BytesBytesPair attributes = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder getAttributesBuilder( int index) { return getAttributesFieldBuilder().getBuilder(index); } + /** + * repeated .BytesBytesPair attributes = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getAttributesOrBuilder( int index) { if (attributesBuilder_ == null) { @@ -1723,6 +2235,9 @@ public final class HBaseProtos { return attributesBuilder_.getMessageOrBuilder(index); } } + /** + * repeated .BytesBytesPair attributes = 2; + */ public java.util.List getAttributesOrBuilderList() { if (attributesBuilder_ != null) { @@ -1731,15 +2246,24 @@ public final class HBaseProtos { return java.util.Collections.unmodifiableList(attributes_); } } + /** + * repeated .BytesBytesPair attributes = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder addAttributesBuilder() { return getAttributesFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance()); } + /** + * repeated .BytesBytesPair attributes = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder addAttributesBuilder( int index) { return getAttributesFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance()); } + /** + * repeated .BytesBytesPair attributes = 2; + */ public java.util.List getAttributesBuilderList() { return getAttributesFieldBuilder().getBuilderList(); @@ -1758,7 +2282,7 @@ public final class HBaseProtos { } return attributesBuilder_; } - + // repeated .ColumnFamilySchema columnFamilies = 3; private java.util.List columnFamilies_ = java.util.Collections.emptyList(); @@ -1768,10 +2292,13 @@ public final class HBaseProtos { bitField0_ |= 0x00000004; } } - + private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> columnFamiliesBuilder_; - + + /** + * repeated .ColumnFamilySchema columnFamilies = 3; + */ public java.util.List getColumnFamiliesList() { if (columnFamiliesBuilder_ == null) { return java.util.Collections.unmodifiableList(columnFamilies_); @@ -1779,6 +2306,9 @@ public final class HBaseProtos { return columnFamiliesBuilder_.getMessageList(); } } + /** + * repeated .ColumnFamilySchema columnFamilies = 3; + */ public int getColumnFamiliesCount() { if (columnFamiliesBuilder_ == null) { return columnFamilies_.size(); @@ -1786,6 +2316,9 @@ public final class HBaseProtos { return columnFamiliesBuilder_.getCount(); } } + /** + * repeated .ColumnFamilySchema columnFamilies = 3; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies(int index) { if (columnFamiliesBuilder_ == null) { return columnFamilies_.get(index); @@ -1793,6 +2326,9 @@ public final class HBaseProtos { return columnFamiliesBuilder_.getMessage(index); } } + /** + * repeated .ColumnFamilySchema columnFamilies = 3; + */ public Builder setColumnFamilies( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value) { if (columnFamiliesBuilder_ == null) { @@ -1807,6 +2343,9 @@ public final class HBaseProtos { } 
return this; } + /** + * repeated .ColumnFamilySchema columnFamilies = 3; + */ public Builder setColumnFamilies( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder builderForValue) { if (columnFamiliesBuilder_ == null) { @@ -1818,6 +2357,9 @@ public final class HBaseProtos { } return this; } + /** + * repeated .ColumnFamilySchema columnFamilies = 3; + */ public Builder addColumnFamilies(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value) { if (columnFamiliesBuilder_ == null) { if (value == null) { @@ -1831,6 +2373,9 @@ public final class HBaseProtos { } return this; } + /** + * repeated .ColumnFamilySchema columnFamilies = 3; + */ public Builder addColumnFamilies( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value) { if (columnFamiliesBuilder_ == null) { @@ -1845,6 +2390,9 @@ public final class HBaseProtos { } return this; } + /** + * repeated .ColumnFamilySchema columnFamilies = 3; + */ public Builder addColumnFamilies( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder builderForValue) { if (columnFamiliesBuilder_ == null) { @@ -1856,6 +2404,9 @@ public final class HBaseProtos { } return this; } + /** + * repeated .ColumnFamilySchema columnFamilies = 3; + */ public Builder addColumnFamilies( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder builderForValue) { if (columnFamiliesBuilder_ == null) { @@ -1867,6 +2418,9 @@ public final class HBaseProtos { } return this; } + /** + * repeated .ColumnFamilySchema columnFamilies = 3; + */ public Builder addAllColumnFamilies( java.lang.Iterable values) { if (columnFamiliesBuilder_ == null) { @@ -1878,6 +2432,9 @@ public final class HBaseProtos { } return this; } + /** + * repeated .ColumnFamilySchema columnFamilies = 3; + */ public Builder clearColumnFamilies() { if (columnFamiliesBuilder_ == null) { columnFamilies_ = java.util.Collections.emptyList(); @@ -1888,6 +2445,9 @@ public final class HBaseProtos { } return this; } + /** + * repeated .ColumnFamilySchema columnFamilies = 3; + */ public Builder removeColumnFamilies(int index) { if (columnFamiliesBuilder_ == null) { ensureColumnFamiliesIsMutable(); @@ -1898,10 +2458,16 @@ public final class HBaseProtos { } return this; } + /** + * repeated .ColumnFamilySchema columnFamilies = 3; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder getColumnFamiliesBuilder( int index) { return getColumnFamiliesFieldBuilder().getBuilder(index); } + /** + * repeated .ColumnFamilySchema columnFamilies = 3; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder( int index) { if (columnFamiliesBuilder_ == null) { @@ -1909,6 +2475,9 @@ public final class HBaseProtos { return columnFamiliesBuilder_.getMessageOrBuilder(index); } } + /** + * repeated .ColumnFamilySchema columnFamilies = 3; + */ public java.util.List getColumnFamiliesOrBuilderList() { if (columnFamiliesBuilder_ != null) { @@ -1917,15 +2486,24 @@ public final class HBaseProtos { return java.util.Collections.unmodifiableList(columnFamilies_); } } + /** + * repeated .ColumnFamilySchema columnFamilies = 3; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder addColumnFamiliesBuilder() { return getColumnFamiliesFieldBuilder().addBuilder( 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance()); } + /** + * repeated .ColumnFamilySchema columnFamilies = 3; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder addColumnFamiliesBuilder( int index) { return getColumnFamiliesFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance()); } + /** + * repeated .ColumnFamilySchema columnFamilies = 3; + */ public java.util.List getColumnFamiliesBuilderList() { return getColumnFamiliesFieldBuilder().getBuilderList(); @@ -1944,7 +2522,7 @@ public final class HBaseProtos { } return columnFamiliesBuilder_; } - + // repeated .NameStringPair configuration = 4; private java.util.List configuration_ = java.util.Collections.emptyList(); @@ -1954,10 +2532,13 @@ public final class HBaseProtos { bitField0_ |= 0x00000008; } } - + private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> configurationBuilder_; - + + /** + * repeated .NameStringPair configuration = 4; + */ public java.util.List getConfigurationList() { if (configurationBuilder_ == null) { return java.util.Collections.unmodifiableList(configuration_); @@ -1965,6 +2546,9 @@ public final class HBaseProtos { return configurationBuilder_.getMessageList(); } } + /** + * repeated .NameStringPair configuration = 4; + */ public int getConfigurationCount() { if (configurationBuilder_ == null) { return configuration_.size(); @@ -1972,6 +2556,9 @@ public final class HBaseProtos { return configurationBuilder_.getCount(); } } + /** + * repeated .NameStringPair configuration = 4; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index) { if (configurationBuilder_ == null) { return configuration_.get(index); @@ -1979,6 +2566,9 @@ public final class HBaseProtos { return configurationBuilder_.getMessage(index); } } + /** + * repeated .NameStringPair configuration = 4; + */ public Builder setConfiguration( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) { if (configurationBuilder_ == null) { @@ -1993,6 +2583,9 @@ public final class HBaseProtos { } return this; } + /** + * repeated .NameStringPair configuration = 4; + */ public Builder setConfiguration( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { if (configurationBuilder_ == null) { @@ -2004,6 +2597,9 @@ public final class HBaseProtos { } return this; } + /** + * repeated .NameStringPair configuration = 4; + */ public Builder addConfiguration(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) { if (configurationBuilder_ == null) { if (value == null) { @@ -2017,6 +2613,9 @@ public final class HBaseProtos { } return this; } + /** + * repeated .NameStringPair configuration = 4; + */ public Builder addConfiguration( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) { if (configurationBuilder_ == null) { @@ -2031,6 +2630,9 @@ public final class HBaseProtos { } return this; } + /** + * repeated .NameStringPair configuration = 4; + */ public Builder addConfiguration( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { if 
(configurationBuilder_ == null) { @@ -2042,6 +2644,9 @@ public final class HBaseProtos { } return this; } + /** + * repeated .NameStringPair configuration = 4; + */ public Builder addConfiguration( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { if (configurationBuilder_ == null) { @@ -2053,6 +2658,9 @@ public final class HBaseProtos { } return this; } + /** + * repeated .NameStringPair configuration = 4; + */ public Builder addAllConfiguration( java.lang.Iterable values) { if (configurationBuilder_ == null) { @@ -2064,6 +2672,9 @@ public final class HBaseProtos { } return this; } + /** + * repeated .NameStringPair configuration = 4; + */ public Builder clearConfiguration() { if (configurationBuilder_ == null) { configuration_ = java.util.Collections.emptyList(); @@ -2074,6 +2685,9 @@ public final class HBaseProtos { } return this; } + /** + * repeated .NameStringPair configuration = 4; + */ public Builder removeConfiguration(int index) { if (configurationBuilder_ == null) { ensureConfigurationIsMutable(); @@ -2084,10 +2698,16 @@ public final class HBaseProtos { } return this; } + /** + * repeated .NameStringPair configuration = 4; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder getConfigurationBuilder( int index) { return getConfigurationFieldBuilder().getBuilder(index); } + /** + * repeated .NameStringPair configuration = 4; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder( int index) { if (configurationBuilder_ == null) { @@ -2095,6 +2715,9 @@ public final class HBaseProtos { return configurationBuilder_.getMessageOrBuilder(index); } } + /** + * repeated .NameStringPair configuration = 4; + */ public java.util.List getConfigurationOrBuilderList() { if (configurationBuilder_ != null) { @@ -2103,15 +2726,24 @@ public final class HBaseProtos { return java.util.Collections.unmodifiableList(configuration_); } } + /** + * repeated .NameStringPair configuration = 4; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder addConfigurationBuilder() { return getConfigurationFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance()); } + /** + * repeated .NameStringPair configuration = 4; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder addConfigurationBuilder( int index) { return getConfigurationFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance()); } + /** + * repeated .NameStringPair configuration = 4; + */ public java.util.List getConfigurationBuilderList() { return getConfigurationFieldBuilder().getBuilderList(); @@ -2130,126 +2762,293 @@ public final class HBaseProtos { } return configurationBuilder_; } - + // @@protoc_insertion_point(builder_scope:TableSchema) } - + static { defaultInstance = new TableSchema(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:TableSchema) } - + public interface ColumnFamilySchemaOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required bytes name = 1; + /** + * required bytes name = 1; + */ boolean hasName(); + /** + * required bytes name = 1; + */ com.google.protobuf.ByteString getName(); - + // repeated .BytesBytesPair attributes = 2; + /** + * repeated .BytesBytesPair attributes = 2; + */ java.util.List getAttributesList(); + /** + * 
repeated .BytesBytesPair attributes = 2; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getAttributes(int index); + /** + * repeated .BytesBytesPair attributes = 2; + */ int getAttributesCount(); + /** + * repeated .BytesBytesPair attributes = 2; + */ java.util.List getAttributesOrBuilderList(); + /** + * repeated .BytesBytesPair attributes = 2; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getAttributesOrBuilder( int index); - + // repeated .NameStringPair configuration = 3; + /** + * repeated .NameStringPair configuration = 3; + */ java.util.List getConfigurationList(); + /** + * repeated .NameStringPair configuration = 3; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index); + /** + * repeated .NameStringPair configuration = 3; + */ int getConfigurationCount(); + /** + * repeated .NameStringPair configuration = 3; + */ java.util.List getConfigurationOrBuilderList(); + /** + * repeated .NameStringPair configuration = 3; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder( int index); } + /** + * Protobuf type {@code ColumnFamilySchema} + * + *
+   **
+   * Column Family Schema
+   * Inspired by the rest ColumSchemaMessage
+   * 
+ */ public static final class ColumnFamilySchema extends com.google.protobuf.GeneratedMessage implements ColumnFamilySchemaOrBuilder { // Use ColumnFamilySchema.newBuilder() to construct. - private ColumnFamilySchema(Builder builder) { + private ColumnFamilySchema(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private ColumnFamilySchema(boolean noInit) {} - + private ColumnFamilySchema(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final ColumnFamilySchema defaultInstance; public static ColumnFamilySchema getDefaultInstance() { return defaultInstance; } - + public ColumnFamilySchema getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ColumnFamilySchema( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + name_ = input.readBytes(); + break; + } + case 18: { + if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + attributes_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000002; + } + attributes_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.PARSER, extensionRegistry)); + break; + } + case 26: { + if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { + configuration_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000004; + } + configuration_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.PARSER, extensionRegistry)); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + attributes_ = java.util.Collections.unmodifiableList(attributes_); + } + if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { + configuration_ = java.util.Collections.unmodifiableList(configuration_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ColumnFamilySchema_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ColumnFamilySchema_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ColumnFamilySchema_fieldAccessorTable + .ensureFieldAccessorsInitialized( + 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public ColumnFamilySchema parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ColumnFamilySchema(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required bytes name = 1; public static final int NAME_FIELD_NUMBER = 1; private com.google.protobuf.ByteString name_; + /** + * required bytes name = 1; + */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes name = 1; + */ public com.google.protobuf.ByteString getName() { return name_; } - + // repeated .BytesBytesPair attributes = 2; public static final int ATTRIBUTES_FIELD_NUMBER = 2; private java.util.List attributes_; + /** + * repeated .BytesBytesPair attributes = 2; + */ public java.util.List getAttributesList() { return attributes_; } + /** + * repeated .BytesBytesPair attributes = 2; + */ public java.util.List getAttributesOrBuilderList() { return attributes_; } + /** + * repeated .BytesBytesPair attributes = 2; + */ public int getAttributesCount() { return attributes_.size(); } + /** + * repeated .BytesBytesPair attributes = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getAttributes(int index) { return attributes_.get(index); } + /** + * repeated .BytesBytesPair attributes = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getAttributesOrBuilder( int index) { return attributes_.get(index); } - + // repeated .NameStringPair configuration = 3; public static final int CONFIGURATION_FIELD_NUMBER = 3; private java.util.List configuration_; + /** + * repeated .NameStringPair configuration = 3; + */ public java.util.List getConfigurationList() { return configuration_; } + /** + * repeated .NameStringPair configuration = 3; + */ public java.util.List getConfigurationOrBuilderList() { return configuration_; } + /** + * repeated .NameStringPair configuration = 3; + */ public int getConfigurationCount() { return configuration_.size(); } + /** + * repeated .NameStringPair configuration = 3; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index) { return configuration_.get(index); } + /** + * repeated .NameStringPair configuration = 3; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder( int index) { return configuration_.get(index); } - + private void initFields() { name_ = com.google.protobuf.ByteString.EMPTY; attributes_ = java.util.Collections.emptyList(); @@ -2259,7 +3058,7 @@ public final class HBaseProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasName()) { memoizedIsInitialized = 0; return false; @@ -2279,7 +3078,7 @@ public final class HBaseProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -2294,12 +3093,12 @@ public final class 
HBaseProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -2317,14 +3116,14 @@ public final class HBaseProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -2334,7 +3133,7 @@ public final class HBaseProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema) obj; - + boolean result = true; result = result && (hasName() == other.hasName()); if (hasName()) { @@ -2349,9 +3148,13 @@ public final class HBaseProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasName()) { @@ -2367,89 +3170,85 @@ public final class HBaseProtos { hash = (53 * hash) + getConfigurationList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code ColumnFamilySchema} + * + *
+     **
+     * Column Family Schema
+     * Inspired by the rest ColumSchemaMessage
+     * 
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder { @@ -2457,18 +3256,21 @@ public final class HBaseProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ColumnFamilySchema_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ColumnFamilySchema_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ColumnFamilySchema_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -2481,7 +3283,7 @@ public final class HBaseProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); name_ = com.google.protobuf.ByteString.EMPTY; @@ -2500,20 +3302,20 @@ public final class HBaseProtos { } return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ColumnFamilySchema_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema build() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema result = buildPartial(); if (!result.isInitialized()) { @@ -2521,17 +3323,7 @@ public final class HBaseProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema buildPartial() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema(this); int from_bitField0_ = bitField0_; @@ -2562,7 +3354,7 @@ public final class HBaseProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema)other); @@ -2571,7 +3363,7 @@ public final class HBaseProtos { 
return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema other) { if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance()) return this; if (other.hasName()) { @@ -2632,7 +3424,7 @@ public final class HBaseProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasName()) { @@ -2652,61 +3444,43 @@ public final class HBaseProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - name_ = input.readBytes(); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addAttributes(subBuilder.buildPartial()); - break; - } - case 26: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addConfiguration(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required bytes name = 1; private com.google.protobuf.ByteString name_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes name = 1; + */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes name = 1; + */ public com.google.protobuf.ByteString getName() { return name_; } + /** + * required bytes name = 1; + */ public Builder setName(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -2716,13 +3490,16 @@ public final class HBaseProtos { onChanged(); return this; } + /** + * required bytes name = 1; + */ public Builder clearName() { bitField0_ = (bitField0_ & ~0x00000001); name_ = getDefaultInstance().getName(); onChanged(); return this; } - + // repeated .BytesBytesPair attributes = 2; private java.util.List attributes_ = java.util.Collections.emptyList(); @@ -2732,10 +3509,13 @@ public final class HBaseProtos { bitField0_ |= 0x00000002; } } - + private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> attributesBuilder_; - + + /** + * repeated .BytesBytesPair attributes = 2; + */ public java.util.List getAttributesList() { if (attributesBuilder_ == null) { return java.util.Collections.unmodifiableList(attributes_); @@ -2743,6 +3523,9 @@ public final class HBaseProtos { return attributesBuilder_.getMessageList(); } } + /** + * repeated .BytesBytesPair attributes = 2; + */ public int getAttributesCount() { if (attributesBuilder_ == null) { return attributes_.size(); @@ -2750,6 +3533,9 @@ public final class HBaseProtos { return attributesBuilder_.getCount(); } } + /** + * repeated .BytesBytesPair attributes = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getAttributes(int index) { if (attributesBuilder_ == null) { return attributes_.get(index); @@ -2757,6 +3543,9 @@ public final class HBaseProtos { return attributesBuilder_.getMessage(index); } } + /** + * repeated .BytesBytesPair attributes = 2; + */ public Builder setAttributes( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) { if (attributesBuilder_ == null) { @@ -2771,6 +3560,9 @@ public final class HBaseProtos { } return this; } + /** + * repeated .BytesBytesPair attributes = 2; + */ public Builder setAttributes( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) { if (attributesBuilder_ == null) { @@ -2782,6 +3574,9 @@ public final class HBaseProtos { } return this; } + /** + * repeated .BytesBytesPair attributes = 2; + */ public Builder addAttributes(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) { if (attributesBuilder_ == null) { if (value == null) { @@ -2795,6 +3590,9 @@ public final class HBaseProtos { } return this; } + /** + * repeated .BytesBytesPair attributes = 2; + */ public Builder addAttributes( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) { if (attributesBuilder_ == null) { @@ -2809,6 +3607,9 @@ public final class HBaseProtos { } return this; } + /** + * repeated .BytesBytesPair attributes = 2; + */ public Builder addAttributes( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) { if (attributesBuilder_ == null) { @@ -2820,6 +3621,9 @@ public final class HBaseProtos { } return this; } + /** + * repeated .BytesBytesPair attributes = 2; + */ public Builder addAttributes( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) { if (attributesBuilder_ == null) { @@ -2831,6 +3635,9 @@ public final class HBaseProtos { } return this; } + /** + * repeated .BytesBytesPair attributes = 2; + */ public Builder addAllAttributes( java.lang.Iterable values) { if (attributesBuilder_ == null) { @@ -2842,6 +3649,9 @@ public final class HBaseProtos { } return this; } + /** + * repeated .BytesBytesPair attributes = 2; + */ public Builder clearAttributes() { if (attributesBuilder_ == null) { attributes_ = java.util.Collections.emptyList(); @@ -2852,6 +3662,9 @@ public final class HBaseProtos { } return this; } + /** + * repeated .BytesBytesPair attributes = 2; + */ public Builder removeAttributes(int index) { if (attributesBuilder_ == null) { ensureAttributesIsMutable(); @@ -2862,10 +3675,16 @@ public final class HBaseProtos { } return this; } + /** + * repeated .BytesBytesPair attributes = 2; + */ public 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder getAttributesBuilder( int index) { return getAttributesFieldBuilder().getBuilder(index); } + /** + * repeated .BytesBytesPair attributes = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getAttributesOrBuilder( int index) { if (attributesBuilder_ == null) { @@ -2873,6 +3692,9 @@ public final class HBaseProtos { return attributesBuilder_.getMessageOrBuilder(index); } } + /** + * repeated .BytesBytesPair attributes = 2; + */ public java.util.List getAttributesOrBuilderList() { if (attributesBuilder_ != null) { @@ -2881,15 +3703,24 @@ public final class HBaseProtos { return java.util.Collections.unmodifiableList(attributes_); } } + /** + * repeated .BytesBytesPair attributes = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder addAttributesBuilder() { return getAttributesFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance()); } + /** + * repeated .BytesBytesPair attributes = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder addAttributesBuilder( int index) { return getAttributesFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance()); } + /** + * repeated .BytesBytesPair attributes = 2; + */ public java.util.List getAttributesBuilderList() { return getAttributesFieldBuilder().getBuilderList(); @@ -2908,7 +3739,7 @@ public final class HBaseProtos { } return attributesBuilder_; } - + // repeated .NameStringPair configuration = 3; private java.util.List configuration_ = java.util.Collections.emptyList(); @@ -2918,10 +3749,13 @@ public final class HBaseProtos { bitField0_ |= 0x00000004; } } - + private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> configurationBuilder_; - + + /** + * repeated .NameStringPair configuration = 3; + */ public java.util.List getConfigurationList() { if (configurationBuilder_ == null) { return java.util.Collections.unmodifiableList(configuration_); @@ -2929,6 +3763,9 @@ public final class HBaseProtos { return configurationBuilder_.getMessageList(); } } + /** + * repeated .NameStringPair configuration = 3; + */ public int getConfigurationCount() { if (configurationBuilder_ == null) { return configuration_.size(); @@ -2936,6 +3773,9 @@ public final class HBaseProtos { return configurationBuilder_.getCount(); } } + /** + * repeated .NameStringPair configuration = 3; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index) { if (configurationBuilder_ == null) { return configuration_.get(index); @@ -2943,6 +3783,9 @@ public final class HBaseProtos { return configurationBuilder_.getMessage(index); } } + /** + * repeated .NameStringPair configuration = 3; + */ public Builder setConfiguration( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) { if (configurationBuilder_ == null) { @@ -2957,6 +3800,9 @@ public final class HBaseProtos { } return this; } + /** + * repeated .NameStringPair configuration = 3; + */ public Builder setConfiguration( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder 
builderForValue) { if (configurationBuilder_ == null) { @@ -2968,6 +3814,9 @@ public final class HBaseProtos { } return this; } + /** + * repeated .NameStringPair configuration = 3; + */ public Builder addConfiguration(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) { if (configurationBuilder_ == null) { if (value == null) { @@ -2981,6 +3830,9 @@ public final class HBaseProtos { } return this; } + /** + * repeated .NameStringPair configuration = 3; + */ public Builder addConfiguration( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) { if (configurationBuilder_ == null) { @@ -2995,6 +3847,9 @@ public final class HBaseProtos { } return this; } + /** + * repeated .NameStringPair configuration = 3; + */ public Builder addConfiguration( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { if (configurationBuilder_ == null) { @@ -3006,6 +3861,9 @@ public final class HBaseProtos { } return this; } + /** + * repeated .NameStringPair configuration = 3; + */ public Builder addConfiguration( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { if (configurationBuilder_ == null) { @@ -3017,6 +3875,9 @@ public final class HBaseProtos { } return this; } + /** + * repeated .NameStringPair configuration = 3; + */ public Builder addAllConfiguration( java.lang.Iterable values) { if (configurationBuilder_ == null) { @@ -3028,6 +3889,9 @@ public final class HBaseProtos { } return this; } + /** + * repeated .NameStringPair configuration = 3; + */ public Builder clearConfiguration() { if (configurationBuilder_ == null) { configuration_ = java.util.Collections.emptyList(); @@ -3038,6 +3902,9 @@ public final class HBaseProtos { } return this; } + /** + * repeated .NameStringPair configuration = 3; + */ public Builder removeConfiguration(int index) { if (configurationBuilder_ == null) { ensureConfigurationIsMutable(); @@ -3048,10 +3915,16 @@ public final class HBaseProtos { } return this; } + /** + * repeated .NameStringPair configuration = 3; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder getConfigurationBuilder( int index) { return getConfigurationFieldBuilder().getBuilder(index); } + /** + * repeated .NameStringPair configuration = 3; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder( int index) { if (configurationBuilder_ == null) { @@ -3059,6 +3932,9 @@ public final class HBaseProtos { return configurationBuilder_.getMessageOrBuilder(index); } } + /** + * repeated .NameStringPair configuration = 3; + */ public java.util.List getConfigurationOrBuilderList() { if (configurationBuilder_ != null) { @@ -3067,15 +3943,24 @@ public final class HBaseProtos { return java.util.Collections.unmodifiableList(configuration_); } } + /** + * repeated .NameStringPair configuration = 3; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder addConfigurationBuilder() { return getConfigurationFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance()); } + /** + * repeated .NameStringPair configuration = 3; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder addConfigurationBuilder( int index) { return getConfigurationFieldBuilder().addBuilder( index, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance()); } + /** + * repeated .NameStringPair configuration = 3; + */ public java.util.List getConfigurationBuilderList() { return getConfigurationFieldBuilder().getBuilderList(); @@ -3094,134 +3979,303 @@ public final class HBaseProtos { } return configurationBuilder_; } - + // @@protoc_insertion_point(builder_scope:ColumnFamilySchema) } - + static { defaultInstance = new ColumnFamilySchema(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:ColumnFamilySchema) } - + public interface RegionInfoOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required uint64 regionId = 1; + /** + * required uint64 regionId = 1; + */ boolean hasRegionId(); + /** + * required uint64 regionId = 1; + */ long getRegionId(); - + // required bytes tableName = 2; + /** + * required bytes tableName = 2; + */ boolean hasTableName(); + /** + * required bytes tableName = 2; + */ com.google.protobuf.ByteString getTableName(); - + // optional bytes startKey = 3; + /** + * optional bytes startKey = 3; + */ boolean hasStartKey(); + /** + * optional bytes startKey = 3; + */ com.google.protobuf.ByteString getStartKey(); - + // optional bytes endKey = 4; + /** + * optional bytes endKey = 4; + */ boolean hasEndKey(); + /** + * optional bytes endKey = 4; + */ com.google.protobuf.ByteString getEndKey(); - + // optional bool offline = 5; + /** + * optional bool offline = 5; + */ boolean hasOffline(); + /** + * optional bool offline = 5; + */ boolean getOffline(); - + // optional bool split = 6; + /** + * optional bool split = 6; + */ boolean hasSplit(); + /** + * optional bool split = 6; + */ boolean getSplit(); } + /** + * Protobuf type {@code RegionInfo} + * + *
+   **
+   * Protocol buffer version of HRegionInfo.
+   * 
+ */ public static final class RegionInfo extends com.google.protobuf.GeneratedMessage implements RegionInfoOrBuilder { // Use RegionInfo.newBuilder() to construct. - private RegionInfo(Builder builder) { + private RegionInfo(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private RegionInfo(boolean noInit) {} - + private RegionInfo(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final RegionInfo defaultInstance; public static RegionInfo getDefaultInstance() { return defaultInstance; } - + public RegionInfo getDefaultInstanceForType() { return defaultInstance; } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionInfo_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionInfo_fieldAccessorTable; - } - - private int bitField0_; - // required uint64 regionId = 1; - public static final int REGIONID_FIELD_NUMBER = 1; - private long regionId_; - public boolean hasRegionId() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public long getRegionId() { - return regionId_; - } - - // required bytes tableName = 2; - public static final int TABLENAME_FIELD_NUMBER = 2; - private com.google.protobuf.ByteString tableName_; - public boolean hasTableName() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getTableName() { - return tableName_; - } - - // optional bytes startKey = 3; - public static final int STARTKEY_FIELD_NUMBER = 3; - private com.google.protobuf.ByteString startKey_; - public boolean hasStartKey() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public com.google.protobuf.ByteString getStartKey() { - return startKey_; - } - - // optional bytes endKey = 4; - public static final int ENDKEY_FIELD_NUMBER = 4; - private com.google.protobuf.ByteString endKey_; - public boolean hasEndKey() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public com.google.protobuf.ByteString getEndKey() { - return endKey_; + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; } - - // optional bool offline = 5; - public static final int OFFLINE_FIELD_NUMBER = 5; - private boolean offline_; + private RegionInfo( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + regionId_ = input.readUInt64(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + tableName_ = input.readBytes(); + break; + } + case 26: { + bitField0_ |= 0x00000004; + startKey_ = input.readBytes(); + break; + } + case 34: { + bitField0_ |= 0x00000008; + 
endKey_ = input.readBytes(); + break; + } + case 40: { + bitField0_ |= 0x00000010; + offline_ = input.readBool(); + break; + } + case 48: { + bitField0_ |= 0x00000020; + split_ = input.readBool(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionInfo_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionInfo_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public RegionInfo parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RegionInfo(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // required uint64 regionId = 1; + public static final int REGIONID_FIELD_NUMBER = 1; + private long regionId_; + /** + * required uint64 regionId = 1; + */ + public boolean hasRegionId() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required uint64 regionId = 1; + */ + public long getRegionId() { + return regionId_; + } + + // required bytes tableName = 2; + public static final int TABLENAME_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString tableName_; + /** + * required bytes tableName = 2; + */ + public boolean hasTableName() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * required bytes tableName = 2; + */ + public com.google.protobuf.ByteString getTableName() { + return tableName_; + } + + // optional bytes startKey = 3; + public static final int STARTKEY_FIELD_NUMBER = 3; + private com.google.protobuf.ByteString startKey_; + /** + * optional bytes startKey = 3; + */ + public boolean hasStartKey() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * optional bytes startKey = 3; + */ + public com.google.protobuf.ByteString getStartKey() { + return startKey_; + } + + // optional bytes endKey = 4; + public static final int ENDKEY_FIELD_NUMBER = 4; + private com.google.protobuf.ByteString endKey_; + /** + * optional bytes endKey = 4; + */ + public boolean hasEndKey() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + /** + * optional bytes endKey = 4; + */ + public com.google.protobuf.ByteString getEndKey() { + return endKey_; + } + + // optional bool offline = 5; + public static final int OFFLINE_FIELD_NUMBER = 5; + private boolean offline_; + /** + * optional bool offline = 5; + */ public boolean hasOffline() { return ((bitField0_ & 0x00000010) == 0x00000010); } + /** + * optional bool offline = 5; + */ public boolean getOffline() { return offline_; } - + // optional bool 
split = 6; public static final int SPLIT_FIELD_NUMBER = 6; private boolean split_; + /** + * optional bool split = 6; + */ public boolean hasSplit() { return ((bitField0_ & 0x00000020) == 0x00000020); } + /** + * optional bool split = 6; + */ public boolean getSplit() { return split_; } - + private void initFields() { regionId_ = 0L; tableName_ = com.google.protobuf.ByteString.EMPTY; @@ -3234,7 +4288,7 @@ public final class HBaseProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasRegionId()) { memoizedIsInitialized = 0; return false; @@ -3246,7 +4300,7 @@ public final class HBaseProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -3270,12 +4324,12 @@ public final class HBaseProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -3305,14 +4359,14 @@ public final class HBaseProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -3322,7 +4376,7 @@ public final class HBaseProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo) obj; - + boolean result = true; result = result && (hasRegionId() == other.hasRegionId()); if (hasRegionId()) { @@ -3358,9 +4412,13 @@ public final class HBaseProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasRegionId()) { @@ -3388,89 +4446,84 @@ public final class HBaseProtos { hash = (53 * hash) + hashBoolean(getSplit()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code RegionInfo} + * + *
+     * <pre>
+     **
+     * Protocol buffer version of HRegionInfo.
+     * </pre>
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder { @@ -3478,18 +4531,21 @@ public final class HBaseProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionInfo_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionInfo_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionInfo_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -3500,7 +4556,7 @@ public final class HBaseProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); regionId_ = 0L; @@ -3517,20 +4573,20 @@ public final class HBaseProtos { bitField0_ = (bitField0_ & ~0x00000020); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionInfo_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo build() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo result = buildPartial(); if (!result.isInitialized()) { @@ -3538,17 +4594,7 @@ public final class HBaseProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo buildPartial() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo(this); int from_bitField0_ = bitField0_; @@ -3581,7 +4627,7 @@ public final class HBaseProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo)other); @@ -3590,7 +4636,7 @@ public final class HBaseProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo other) { if (other == 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance()) return this; if (other.hasRegionId()) { @@ -3614,7 +4660,7 @@ public final class HBaseProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasRegionId()) { @@ -3626,95 +4672,76 @@ public final class HBaseProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - regionId_ = input.readUInt64(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - tableName_ = input.readBytes(); - break; - } - case 26: { - bitField0_ |= 0x00000004; - startKey_ = input.readBytes(); - break; - } - case 34: { - bitField0_ |= 0x00000008; - endKey_ = input.readBytes(); - break; - } - case 40: { - bitField0_ |= 0x00000010; - offline_ = input.readBool(); - break; - } - case 48: { - bitField0_ |= 0x00000020; - split_ = input.readBool(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required uint64 regionId = 1; private long regionId_ ; + /** + * required uint64 regionId = 1; + */ public boolean hasRegionId() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required uint64 regionId = 1; + */ public long getRegionId() { return regionId_; } + /** + * required uint64 regionId = 1; + */ public Builder setRegionId(long value) { bitField0_ |= 0x00000001; regionId_ = value; onChanged(); return this; } + /** + * required uint64 regionId = 1; + */ public Builder clearRegionId() { bitField0_ = (bitField0_ & ~0x00000001); regionId_ = 0L; onChanged(); return this; } - + // required bytes tableName = 2; private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes tableName = 2; + */ public boolean hasTableName() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required bytes tableName = 2; + */ public com.google.protobuf.ByteString getTableName() { return tableName_; } + /** + * required bytes tableName = 2; + */ public Builder setTableName(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -3724,21 +4751,33 @@ public final class HBaseProtos { onChanged(); return this; } + /** + * required bytes tableName = 2; + */ public Builder clearTableName() { bitField0_ = (bitField0_ & ~0x00000002); tableName_ = getDefaultInstance().getTableName(); onChanged(); return this; } - + // optional bytes startKey = 3; private com.google.protobuf.ByteString startKey_ = 
com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes startKey = 3; + */ public boolean hasStartKey() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional bytes startKey = 3; + */ public com.google.protobuf.ByteString getStartKey() { return startKey_; } + /** + * optional bytes startKey = 3; + */ public Builder setStartKey(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -3748,21 +4787,33 @@ public final class HBaseProtos { onChanged(); return this; } + /** + * optional bytes startKey = 3; + */ public Builder clearStartKey() { bitField0_ = (bitField0_ & ~0x00000004); startKey_ = getDefaultInstance().getStartKey(); onChanged(); return this; } - + // optional bytes endKey = 4; private com.google.protobuf.ByteString endKey_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes endKey = 4; + */ public boolean hasEndKey() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * optional bytes endKey = 4; + */ public com.google.protobuf.ByteString getEndKey() { return endKey_; } + /** + * optional bytes endKey = 4; + */ public Builder setEndKey(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -3772,117 +4823,273 @@ public final class HBaseProtos { onChanged(); return this; } + /** + * optional bytes endKey = 4; + */ public Builder clearEndKey() { bitField0_ = (bitField0_ & ~0x00000008); endKey_ = getDefaultInstance().getEndKey(); onChanged(); return this; } - + // optional bool offline = 5; private boolean offline_ ; + /** + * optional bool offline = 5; + */ public boolean hasOffline() { return ((bitField0_ & 0x00000010) == 0x00000010); } + /** + * optional bool offline = 5; + */ public boolean getOffline() { return offline_; } + /** + * optional bool offline = 5; + */ public Builder setOffline(boolean value) { bitField0_ |= 0x00000010; offline_ = value; onChanged(); return this; } + /** + * optional bool offline = 5; + */ public Builder clearOffline() { bitField0_ = (bitField0_ & ~0x00000010); offline_ = false; onChanged(); return this; } - + // optional bool split = 6; private boolean split_ ; + /** + * optional bool split = 6; + */ public boolean hasSplit() { return ((bitField0_ & 0x00000020) == 0x00000020); } + /** + * optional bool split = 6; + */ public boolean getSplit() { return split_; } + /** + * optional bool split = 6; + */ public Builder setSplit(boolean value) { bitField0_ |= 0x00000020; split_ = value; onChanged(); return this; } + /** + * optional bool split = 6; + */ public Builder clearSplit() { bitField0_ = (bitField0_ & ~0x00000020); split_ = false; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:RegionInfo) } - + static { defaultInstance = new RegionInfo(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:RegionInfo) } - + public interface RegionSpecifierOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .RegionSpecifier.RegionSpecifierType type = 1; + /** + * required .RegionSpecifier.RegionSpecifierType type = 1; + */ boolean hasType(); + /** + * required .RegionSpecifier.RegionSpecifierType type = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType getType(); - + // required bytes value = 2; + /** + * required bytes value = 2; + */ boolean hasValue(); + /** + * required bytes value = 2; + */ com.google.protobuf.ByteString getValue(); } + /** + * Protobuf type {@code RegionSpecifier} + * + *
+   * <pre>
+   **
+   * Container protocol buffer to specify a region.
+   * You can specify region by region name, or the hash
+   * of the region name, which is known as encoded
+   * region name.
+   * </pre>
+ */ public static final class RegionSpecifier extends com.google.protobuf.GeneratedMessage implements RegionSpecifierOrBuilder { // Use RegionSpecifier.newBuilder() to construct. - private RegionSpecifier(Builder builder) { + private RegionSpecifier(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private RegionSpecifier(boolean noInit) {} - + private RegionSpecifier(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final RegionSpecifier defaultInstance; public static RegionSpecifier getDefaultInstance() { return defaultInstance; } - + public RegionSpecifier getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private RegionSpecifier( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType value = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(1, rawValue); + } else { + bitField0_ |= 0x00000001; + type_ = value; + } + break; + } + case 18: { + bitField0_ |= 0x00000002; + value_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionSpecifier_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionSpecifier_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionSpecifier_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder.class); } - + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public RegionSpecifier parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RegionSpecifier(input, extensionRegistry); + } + }; + + @java.lang.Override + 
public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + /** + * Protobuf enum {@code RegionSpecifier.RegionSpecifierType} + */ public enum RegionSpecifierType implements com.google.protobuf.ProtocolMessageEnum { + /** + * REGION_NAME = 1; + * + *
+       * <pre>
+       * &lt;tablename&gt;,&lt;startkey&gt;,&lt;regionId&gt;.&lt;encodedName&gt;
+       * </pre>
+ */ REGION_NAME(0, 1), + /** + * ENCODED_REGION_NAME = 2; + * + *
+       * <pre>
+       * hash of &lt;tablename&gt;,&lt;startkey&gt;,&lt;regionId&gt;
+       * </pre>
+ */ ENCODED_REGION_NAME(1, 2), ; - + + /** + * REGION_NAME = 1; + * + *
+       * <pre>
+       * &lt;tablename&gt;,&lt;startkey&gt;,&lt;regionId&gt;.&lt;encodedName&gt;
+       * </pre>
+ */ public static final int REGION_NAME_VALUE = 1; + /** + * ENCODED_REGION_NAME = 2; + * + *
+       * <pre>
+       * hash of &lt;tablename&gt;,&lt;startkey&gt;,&lt;regionId&gt;
+       * </pre>
+ */ public static final int ENCODED_REGION_NAME_VALUE = 2; - - + + public final int getNumber() { return value; } - + public static RegionSpecifierType valueOf(int value) { switch (value) { case 1: return REGION_NAME; @@ -3890,7 +5097,7 @@ public final class HBaseProtos { default: return null; } } - + public static com.google.protobuf.Internal.EnumLiteMap internalGetValueMap() { return internalValueMap; @@ -3902,7 +5109,7 @@ public final class HBaseProtos { return RegionSpecifierType.valueOf(number); } }; - + public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(index); @@ -3915,11 +5122,9 @@ public final class HBaseProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDescriptor().getEnumTypes().get(0); } - - private static final RegionSpecifierType[] VALUES = { - REGION_NAME, ENCODED_REGION_NAME, - }; - + + private static final RegionSpecifierType[] VALUES = values(); + public static RegionSpecifierType valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { @@ -3928,39 +5133,51 @@ public final class HBaseProtos { } return VALUES[desc.getIndex()]; } - + private final int index; private final int value; - + private RegionSpecifierType(int index, int value) { this.index = index; this.value = value; } - + // @@protoc_insertion_point(enum_scope:RegionSpecifier.RegionSpecifierType) } - + private int bitField0_; // required .RegionSpecifier.RegionSpecifierType type = 1; public static final int TYPE_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType type_; + /** + * required .RegionSpecifier.RegionSpecifierType type = 1; + */ public boolean hasType() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .RegionSpecifier.RegionSpecifierType type = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType getType() { return type_; } - + // required bytes value = 2; public static final int VALUE_FIELD_NUMBER = 2; private com.google.protobuf.ByteString value_; + /** + * required bytes value = 2; + */ public boolean hasValue() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required bytes value = 2; + */ public com.google.protobuf.ByteString getValue() { return value_; } - + private void initFields() { type_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME; value_ = com.google.protobuf.ByteString.EMPTY; @@ -3969,7 +5186,7 @@ public final class HBaseProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasType()) { memoizedIsInitialized = 0; return false; @@ -3981,7 +5198,7 @@ public final class HBaseProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -3993,12 +5210,12 @@ public final class HBaseProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -4012,14 +5229,14 @@ public final class HBaseProtos { memoizedSerializedSize = size; return size; } - + private 
static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -4029,7 +5246,7 @@ public final class HBaseProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier) obj; - + boolean result = true; result = result && (hasType() == other.hasType()); if (hasType()) { @@ -4045,9 +5262,13 @@ public final class HBaseProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasType()) { @@ -4059,89 +5280,87 @@ public final class HBaseProtos { hash = (53 * hash) + getValue().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code RegionSpecifier} + * + *
+     * <pre>
+     **
+     * Container protocol buffer to specify a region.
+     * You can specify region by region name, or the hash
+     * of the region name, which is known as encoded
+     * region name.
+     * </pre>
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder { @@ -4149,18 +5368,21 @@ public final class HBaseProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionSpecifier_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionSpecifier_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionSpecifier_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -4171,7 +5393,7 @@ public final class HBaseProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); type_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME; @@ -4180,20 +5402,20 @@ public final class HBaseProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionSpecifier_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier build() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier result = buildPartial(); if (!result.isInitialized()) { @@ -4201,17 +5423,7 @@ public final class HBaseProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier buildPartial() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier(this); int from_bitField0_ = bitField0_; @@ -4228,7 +5440,7 @@ public final class HBaseProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier)other); @@ -4237,7 
+5449,7 @@ public final class HBaseProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier other) { if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) return this; if (other.hasType()) { @@ -4249,7 +5461,7 @@ public final class HBaseProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasType()) { @@ -4261,60 +5473,43 @@ public final class HBaseProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType value = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(1, rawValue); - } else { - bitField0_ |= 0x00000001; - type_ = value; - } - break; - } - case 18: { - bitField0_ |= 0x00000002; - value_ = input.readBytes(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .RegionSpecifier.RegionSpecifierType type = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType type_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME; + /** + * required .RegionSpecifier.RegionSpecifierType type = 1; + */ public boolean hasType() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .RegionSpecifier.RegionSpecifierType type = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType getType() { return type_; } + /** + * required .RegionSpecifier.RegionSpecifierType type = 1; + */ public Builder setType(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType value) { if (value == null) { throw new NullPointerException(); @@ -4324,21 +5519,33 @@ public final class HBaseProtos { onChanged(); return this; } + /** + * required .RegionSpecifier.RegionSpecifierType type = 1; + */ public Builder clearType() { bitField0_ = (bitField0_ & ~0x00000001); type_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME; onChanged(); return this; } - + // required bytes value = 2; private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY; + /** + * required 
bytes value = 2; + */ public boolean hasValue() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required bytes value = 2; + */ public com.google.protobuf.ByteString getValue() { return value_; } + /** + * required bytes value = 2; + */ public Builder setValue(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -4348,270 +5555,868 @@ public final class HBaseProtos { onChanged(); return this; } + /** + * required bytes value = 2; + */ public Builder clearValue() { bitField0_ = (bitField0_ & ~0x00000002); value_ = getDefaultInstance().getValue(); onChanged(); return this; } - - // @@protoc_insertion_point(builder_scope:RegionSpecifier) - } - - static { - defaultInstance = new RegionSpecifier(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:RegionSpecifier) - } - - public interface RegionLoadOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .RegionSpecifier regionSpecifier = 1; - boolean hasRegionSpecifier(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegionSpecifier(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionSpecifierOrBuilder(); - - // optional uint32 stores = 2; - boolean hasStores(); - int getStores(); - - // optional uint32 storefiles = 3; - boolean hasStorefiles(); - int getStorefiles(); - - // optional uint32 storeUncompressedSizeMB = 4; - boolean hasStoreUncompressedSizeMB(); - int getStoreUncompressedSizeMB(); - - // optional uint32 storefileSizeMB = 5; - boolean hasStorefileSizeMB(); - int getStorefileSizeMB(); - - // optional uint32 memstoreSizeMB = 6; - boolean hasMemstoreSizeMB(); - int getMemstoreSizeMB(); - - // optional uint32 storefileIndexSizeMB = 7; - boolean hasStorefileIndexSizeMB(); - int getStorefileIndexSizeMB(); - - // optional uint64 readRequestsCount = 8; - boolean hasReadRequestsCount(); - long getReadRequestsCount(); - - // optional uint64 writeRequestsCount = 9; - boolean hasWriteRequestsCount(); - long getWriteRequestsCount(); - - // optional uint64 totalCompactingKVs = 10; - boolean hasTotalCompactingKVs(); - long getTotalCompactingKVs(); - - // optional uint64 currentCompactedKVs = 11; - boolean hasCurrentCompactedKVs(); - long getCurrentCompactedKVs(); - - // optional uint32 rootIndexSizeKB = 12; - boolean hasRootIndexSizeKB(); - int getRootIndexSizeKB(); - - // optional uint32 totalStaticIndexSizeKB = 13; - boolean hasTotalStaticIndexSizeKB(); - int getTotalStaticIndexSizeKB(); - - // optional uint32 totalStaticBloomSizeKB = 14; - boolean hasTotalStaticBloomSizeKB(); - int getTotalStaticBloomSizeKB(); - - // optional uint64 completeSequenceId = 15; - boolean hasCompleteSequenceId(); - long getCompleteSequenceId(); - } - public static final class RegionLoad extends - com.google.protobuf.GeneratedMessage - implements RegionLoadOrBuilder { - // Use RegionLoad.newBuilder() to construct. 
- private RegionLoad(Builder builder) { - super(builder); - } - private RegionLoad(boolean noInit) {} - - private static final RegionLoad defaultInstance; - public static RegionLoad getDefaultInstance() { - return defaultInstance; - } - - public RegionLoad getDefaultInstanceForType() { - return defaultInstance; + + // @@protoc_insertion_point(builder_scope:RegionSpecifier) + } + + static { + defaultInstance = new RegionSpecifier(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:RegionSpecifier) + } + + public interface RegionLoadOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier regionSpecifier = 1; + /** + * required .RegionSpecifier regionSpecifier = 1; + * + *
+     * <pre>
+     ** the region specifier 
+     * </pre>
+ */ + boolean hasRegionSpecifier(); + /** + * required .RegionSpecifier regionSpecifier = 1; + * + *
+     * <pre>
+     ** the region specifier 
+     * </pre>
+ */ + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegionSpecifier(); + /** + * required .RegionSpecifier regionSpecifier = 1; + * + *
+     * <pre>
+     ** the region specifier 
+     * </pre>
+ */ + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionSpecifierOrBuilder(); + + // optional uint32 stores = 2; + /** + * optional uint32 stores = 2; + * + *
+     * <pre>
+     ** the number of stores for the region 
+     * </pre>
+ */ + boolean hasStores(); + /** + * optional uint32 stores = 2; + * + *
+     * <pre>
+     ** the number of stores for the region 
+     * </pre>
+ */ + int getStores(); + + // optional uint32 storefiles = 3; + /** + * optional uint32 storefiles = 3; + * + *
+     * <pre>
+     ** the number of storefiles for the region 
+     * </pre>
+ */ + boolean hasStorefiles(); + /** + * optional uint32 storefiles = 3; + * + *
+     * <pre>
+     ** the number of storefiles for the region 
+     * </pre>
+ */ + int getStorefiles(); + + // optional uint32 storeUncompressedSizeMB = 4; + /** + * optional uint32 storeUncompressedSizeMB = 4; + * + *
+     * <pre>
+     ** the total size of the store files for the region, uncompressed, in MB 
+     * </pre>
+ */ + boolean hasStoreUncompressedSizeMB(); + /** + * optional uint32 storeUncompressedSizeMB = 4; + * + *
+     * <pre>
+     ** the total size of the store files for the region, uncompressed, in MB 
+     * </pre>
+ */ + int getStoreUncompressedSizeMB(); + + // optional uint32 storefileSizeMB = 5; + /** + * optional uint32 storefileSizeMB = 5; + * + *
+     * <pre>
+     ** the current total size of the store files for the region, in MB 
+     * </pre>
+ */ + boolean hasStorefileSizeMB(); + /** + * optional uint32 storefileSizeMB = 5; + * + *
+     * <pre>
+     ** the current total size of the store files for the region, in MB 
+     * </pre>
+ */ + int getStorefileSizeMB(); + + // optional uint32 memstoreSizeMB = 6; + /** + * optional uint32 memstoreSizeMB = 6; + * + *
+     * <pre>
+     ** the current size of the memstore for the region, in MB 
+     * </pre>
+ */ + boolean hasMemstoreSizeMB(); + /** + * optional uint32 memstoreSizeMB = 6; + * + *
+     * <pre>
+     ** the current size of the memstore for the region, in MB 
+     * </pre>
+ */ + int getMemstoreSizeMB(); + + // optional uint32 storefileIndexSizeMB = 7; + /** + * optional uint32 storefileIndexSizeMB = 7; + * + *
+     * <pre>
+     **
+     * The current total size of root-level store file indexes for the region,
+     * in MB. The same as {@link #rootIndexSizeKB} but in MB.
+     * </pre>
+ */ + boolean hasStorefileIndexSizeMB(); + /** + * optional uint32 storefileIndexSizeMB = 7; + * + *
+     * <pre>
+     **
+     * The current total size of root-level store file indexes for the region,
+     * in MB. The same as {@link #rootIndexSizeKB} but in MB.
+     * </pre>
+ */ + int getStorefileIndexSizeMB(); + + // optional uint64 readRequestsCount = 8; + /** + * optional uint64 readRequestsCount = 8; + * + *
+     * <pre>
+     ** the current total read requests made to region 
+     * </pre>
+ */ + boolean hasReadRequestsCount(); + /** + * optional uint64 readRequestsCount = 8; + * + *
+     * <pre>
+     ** the current total read requests made to region 
+     * </pre>
+ */ + long getReadRequestsCount(); + + // optional uint64 writeRequestsCount = 9; + /** + * optional uint64 writeRequestsCount = 9; + * + *
+     * <pre>
+     ** the current total write requests made to region 
+     * </pre>
+ */ + boolean hasWriteRequestsCount(); + /** + * optional uint64 writeRequestsCount = 9; + * + *
+     * <pre>
+     ** the current total write requests made to region 
+     * </pre>
+ */ + long getWriteRequestsCount(); + + // optional uint64 totalCompactingKVs = 10; + /** + * optional uint64 totalCompactingKVs = 10; + * + *
+     * <pre>
+     ** the total compacting key values in currently running compaction 
+     * </pre>
+ */ + boolean hasTotalCompactingKVs(); + /** + * optional uint64 totalCompactingKVs = 10; + * + *
+     * <pre>
+     ** the total compacting key values in currently running compaction 
+     * </pre>
+ */ + long getTotalCompactingKVs(); + + // optional uint64 currentCompactedKVs = 11; + /** + * optional uint64 currentCompactedKVs = 11; + * + *
+     * <pre>
+     ** the completed count of key values in currently running compaction 
+     * </pre>
+ */ + boolean hasCurrentCompactedKVs(); + /** + * optional uint64 currentCompactedKVs = 11; + * + *
+     * <pre>
+     ** the completed count of key values in currently running compaction 
+     * </pre>
+ */ + long getCurrentCompactedKVs(); + + // optional uint32 rootIndexSizeKB = 12; + /** + * optional uint32 rootIndexSizeKB = 12; + * + *
+     * <pre>
+     ** The current total size of root-level indexes for the region, in KB. 
+     * </pre>
+ */ + boolean hasRootIndexSizeKB(); + /** + * optional uint32 rootIndexSizeKB = 12; + * + *
+     * <pre>
+     ** The current total size of root-level indexes for the region, in KB. 
+     * </pre>
+ */ + int getRootIndexSizeKB(); + + // optional uint32 totalStaticIndexSizeKB = 13; + /** + * optional uint32 totalStaticIndexSizeKB = 13; + * + *
+     * <pre>
+     ** The total size of all index blocks, not just the root level, in KB. 
+     * </pre>
+ */ + boolean hasTotalStaticIndexSizeKB(); + /** + * optional uint32 totalStaticIndexSizeKB = 13; + * + *
+     * <pre>
+     ** The total size of all index blocks, not just the root level, in KB. 
+     * </pre>
+ */ + int getTotalStaticIndexSizeKB(); + + // optional uint32 totalStaticBloomSizeKB = 14; + /** + * optional uint32 totalStaticBloomSizeKB = 14; + * + *
+     * <pre>
+     **
+     * The total size of all Bloom filter blocks, not just loaded into the
+     * block cache, in KB.
+     * </pre>
+ */ + boolean hasTotalStaticBloomSizeKB(); + /** + * optional uint32 totalStaticBloomSizeKB = 14; + * + *
+     * <pre>
+     **
+     * The total size of all Bloom filter blocks, not just loaded into the
+     * block cache, in KB.
+     * </pre>
+ */ + int getTotalStaticBloomSizeKB(); + + // optional uint64 completeSequenceId = 15; + /** + * optional uint64 completeSequenceId = 15; + * + *
+     * <pre>
+     ** the most recent sequence Id from cache flush 
+     * </pre>
+ */ + boolean hasCompleteSequenceId(); + /** + * optional uint64 completeSequenceId = 15; + * + *
+     * <pre>
+     ** the most recent sequence Id from cache flush 
+     * </pre>
+ */ + long getCompleteSequenceId(); + } + /** + * Protobuf type {@code RegionLoad} + */ + public static final class RegionLoad extends + com.google.protobuf.GeneratedMessage + implements RegionLoadOrBuilder { + // Use RegionLoad.newBuilder() to construct. + private RegionLoad(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private RegionLoad(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final RegionLoad defaultInstance; + public static RegionLoad getDefaultInstance() { + return defaultInstance; + } + + public RegionLoad getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private RegionLoad( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = regionSpecifier_.toBuilder(); + } + regionSpecifier_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(regionSpecifier_); + regionSpecifier_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + case 16: { + bitField0_ |= 0x00000002; + stores_ = input.readUInt32(); + break; + } + case 24: { + bitField0_ |= 0x00000004; + storefiles_ = input.readUInt32(); + break; + } + case 32: { + bitField0_ |= 0x00000008; + storeUncompressedSizeMB_ = input.readUInt32(); + break; + } + case 40: { + bitField0_ |= 0x00000010; + storefileSizeMB_ = input.readUInt32(); + break; + } + case 48: { + bitField0_ |= 0x00000020; + memstoreSizeMB_ = input.readUInt32(); + break; + } + case 56: { + bitField0_ |= 0x00000040; + storefileIndexSizeMB_ = input.readUInt32(); + break; + } + case 64: { + bitField0_ |= 0x00000080; + readRequestsCount_ = input.readUInt64(); + break; + } + case 72: { + bitField0_ |= 0x00000100; + writeRequestsCount_ = input.readUInt64(); + break; + } + case 80: { + bitField0_ |= 0x00000200; + totalCompactingKVs_ = input.readUInt64(); + break; + } + case 88: { + bitField0_ |= 0x00000400; + currentCompactedKVs_ = input.readUInt64(); + break; + } + case 96: { + bitField0_ |= 0x00000800; + rootIndexSizeKB_ = input.readUInt32(); + break; + } + case 104: { + bitField0_ |= 0x00001000; + totalStaticIndexSizeKB_ = input.readUInt32(); + break; + } + case 112: { + bitField0_ |= 0x00002000; + totalStaticBloomSizeKB_ = input.readUInt32(); + break; + } + case 120: { + bitField0_ |= 0x00004000; + completeSequenceId_ = input.readUInt64(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch 
(java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } } - public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionLoad_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionLoad_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionLoad_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public RegionLoad parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RegionLoad(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required .RegionSpecifier regionSpecifier = 1; public static final int REGIONSPECIFIER_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier regionSpecifier_; + /** + * required .RegionSpecifier regionSpecifier = 1; + * + *
+     * <pre>
+     ** the region specifier 
+     * </pre>
+ */ public boolean hasRegionSpecifier() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .RegionSpecifier regionSpecifier = 1; + * + *
+     * <pre>
+     ** the region specifier 
+     * </pre>
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegionSpecifier() { return regionSpecifier_; } + /** + * required .RegionSpecifier regionSpecifier = 1; + * + *
+     * <pre>
+     ** the region specifier 
+     * </pre>
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionSpecifierOrBuilder() { return regionSpecifier_; } - + // optional uint32 stores = 2; public static final int STORES_FIELD_NUMBER = 2; private int stores_; + /** + * optional uint32 stores = 2; + * + *
+     * <pre>
+     ** the number of stores for the region 
+     * </pre>
+ */ public boolean hasStores() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional uint32 stores = 2; + * + *
+     * <pre>
+     ** the number of stores for the region 
+     * </pre>
+ */ public int getStores() { return stores_; } - + // optional uint32 storefiles = 3; public static final int STOREFILES_FIELD_NUMBER = 3; private int storefiles_; + /** + * optional uint32 storefiles = 3; + * + *
+     * <pre>
+     ** the number of storefiles for the region 
+     * </pre>
+ */ public boolean hasStorefiles() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional uint32 storefiles = 3; + * + *
+     * <pre>
+     ** the number of storefiles for the region 
+     * </pre>
+ */ public int getStorefiles() { return storefiles_; } - + // optional uint32 storeUncompressedSizeMB = 4; public static final int STOREUNCOMPRESSEDSIZEMB_FIELD_NUMBER = 4; private int storeUncompressedSizeMB_; + /** + * optional uint32 storeUncompressedSizeMB = 4; + * + *
+     * <pre>
+     ** the total size of the store files for the region, uncompressed, in MB 
+     * </pre>
+ */ public boolean hasStoreUncompressedSizeMB() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * optional uint32 storeUncompressedSizeMB = 4; + * + *
+     * <pre>
+     ** the total size of the store files for the region, uncompressed, in MB 
+     * </pre>
+ */ public int getStoreUncompressedSizeMB() { return storeUncompressedSizeMB_; } - + // optional uint32 storefileSizeMB = 5; public static final int STOREFILESIZEMB_FIELD_NUMBER = 5; private int storefileSizeMB_; + /** + * optional uint32 storefileSizeMB = 5; + * + *
+     * <pre>
+     ** the current total size of the store files for the region, in MB 
+     * </pre>
+ */ public boolean hasStorefileSizeMB() { return ((bitField0_ & 0x00000010) == 0x00000010); } + /** + * optional uint32 storefileSizeMB = 5; + * + *
+     * <pre>
+     ** the current total size of the store files for the region, in MB 
+     * </pre>
+ */ public int getStorefileSizeMB() { return storefileSizeMB_; } - + // optional uint32 memstoreSizeMB = 6; public static final int MEMSTORESIZEMB_FIELD_NUMBER = 6; private int memstoreSizeMB_; + /** + * optional uint32 memstoreSizeMB = 6; + * + *
+     * <pre>
+     ** the current size of the memstore for the region, in MB 
+     * </pre>
+ */ public boolean hasMemstoreSizeMB() { return ((bitField0_ & 0x00000020) == 0x00000020); } + /** + * optional uint32 memstoreSizeMB = 6; + * + *
+     * <pre>
+     ** the current size of the memstore for the region, in MB 
+     * </pre>
+ */ public int getMemstoreSizeMB() { return memstoreSizeMB_; } - + // optional uint32 storefileIndexSizeMB = 7; public static final int STOREFILEINDEXSIZEMB_FIELD_NUMBER = 7; private int storefileIndexSizeMB_; + /** + * optional uint32 storefileIndexSizeMB = 7; + * + *
+     * <pre>
+     **
+     * The current total size of root-level store file indexes for the region,
+     * in MB. The same as {@link #rootIndexSizeKB} but in MB.
+     * </pre>
+ */ public boolean hasStorefileIndexSizeMB() { return ((bitField0_ & 0x00000040) == 0x00000040); } + /** + * optional uint32 storefileIndexSizeMB = 7; + * + *
+     * <pre>
+     **
+     * The current total size of root-level store file indexes for the region,
+     * in MB. The same as {@link #rootIndexSizeKB} but in MB.
+     * </pre>
+ */ public int getStorefileIndexSizeMB() { return storefileIndexSizeMB_; } - + // optional uint64 readRequestsCount = 8; public static final int READREQUESTSCOUNT_FIELD_NUMBER = 8; private long readRequestsCount_; + /** + * optional uint64 readRequestsCount = 8; + * + *
+     * <pre>
+     ** the current total read requests made to region 
+     * </pre>
+ */ public boolean hasReadRequestsCount() { return ((bitField0_ & 0x00000080) == 0x00000080); } + /** + * optional uint64 readRequestsCount = 8; + * + *
+     ** the current total read requests made to the region 
+     * 
+ */ public long getReadRequestsCount() { return readRequestsCount_; } - + // optional uint64 writeRequestsCount = 9; public static final int WRITEREQUESTSCOUNT_FIELD_NUMBER = 9; private long writeRequestsCount_; + /** + * optional uint64 writeRequestsCount = 9; + * + *
+     ** the current total write requests made to the region 
+     * 
+ */ public boolean hasWriteRequestsCount() { return ((bitField0_ & 0x00000100) == 0x00000100); } + /** + * optional uint64 writeRequestsCount = 9; + * + *
+     ** the current total write requests made to the region 
+     * 
+ */ public long getWriteRequestsCount() { return writeRequestsCount_; } - + // optional uint64 totalCompactingKVs = 10; public static final int TOTALCOMPACTINGKVS_FIELD_NUMBER = 10; private long totalCompactingKVs_; + /** + * optional uint64 totalCompactingKVs = 10; + * + *
+     ** the total compacting key values in the currently running compaction 
+     * 
+ */ public boolean hasTotalCompactingKVs() { return ((bitField0_ & 0x00000200) == 0x00000200); } + /** + * optional uint64 totalCompactingKVs = 10; + * + *
+     ** the total compacting key values in the currently running compaction 
+     * 
+ */ public long getTotalCompactingKVs() { return totalCompactingKVs_; } - + // optional uint64 currentCompactedKVs = 11; public static final int CURRENTCOMPACTEDKVS_FIELD_NUMBER = 11; private long currentCompactedKVs_; + /** + * optional uint64 currentCompactedKVs = 11; + * + *
+     ** the completed count of key values in the currently running compaction 
+     * 
+ */ public boolean hasCurrentCompactedKVs() { return ((bitField0_ & 0x00000400) == 0x00000400); } + /** + * optional uint64 currentCompactedKVs = 11; + * + *
+     ** the completed count of key values in the currently running compaction 
+     * 
+ */ public long getCurrentCompactedKVs() { return currentCompactedKVs_; } - + // optional uint32 rootIndexSizeKB = 12; public static final int ROOTINDEXSIZEKB_FIELD_NUMBER = 12; private int rootIndexSizeKB_; + /** + * optional uint32 rootIndexSizeKB = 12; + * + *
+     ** The current total size of root-level indexes for the region, in KB. 
+     * 
+ */ public boolean hasRootIndexSizeKB() { return ((bitField0_ & 0x00000800) == 0x00000800); } + /** + * optional uint32 rootIndexSizeKB = 12; + * + *
+     ** The current total size of root-level indexes for the region, in KB. 
+     * 
+ */ public int getRootIndexSizeKB() { return rootIndexSizeKB_; } - + // optional uint32 totalStaticIndexSizeKB = 13; public static final int TOTALSTATICINDEXSIZEKB_FIELD_NUMBER = 13; private int totalStaticIndexSizeKB_; + /** + * optional uint32 totalStaticIndexSizeKB = 13; + * + *
+     ** The total size of all index blocks, not just the root level, in KB. 
+     * 
+ */ public boolean hasTotalStaticIndexSizeKB() { return ((bitField0_ & 0x00001000) == 0x00001000); } + /** + * optional uint32 totalStaticIndexSizeKB = 13; + * + *
+     ** The total size of all index blocks, not just the root level, in KB. 
+     * 
+ */ public int getTotalStaticIndexSizeKB() { return totalStaticIndexSizeKB_; } - + // optional uint32 totalStaticBloomSizeKB = 14; public static final int TOTALSTATICBLOOMSIZEKB_FIELD_NUMBER = 14; private int totalStaticBloomSizeKB_; + /** + * optional uint32 totalStaticBloomSizeKB = 14; + * + *
+     **
+     * The total size of all Bloom filter blocks, not just loaded into the
+     * block cache, in KB.
+     * 
+ */ public boolean hasTotalStaticBloomSizeKB() { return ((bitField0_ & 0x00002000) == 0x00002000); } + /** + * optional uint32 totalStaticBloomSizeKB = 14; + * + *
+     **
+     * The total size of all Bloom filter blocks, not just loaded into the
+     * block cache, in KB.
+     * 
+ */ public int getTotalStaticBloomSizeKB() { return totalStaticBloomSizeKB_; } - + // optional uint64 completeSequenceId = 15; public static final int COMPLETESEQUENCEID_FIELD_NUMBER = 15; private long completeSequenceId_; + /** + * optional uint64 completeSequenceId = 15; + * + *
+     ** the most recent sequence Id from a cache flush 
+     * 
+ */ public boolean hasCompleteSequenceId() { return ((bitField0_ & 0x00004000) == 0x00004000); } + /** + * optional uint64 completeSequenceId = 15; + * + *
+     ** the most recent sequence Id from a cache flush 
+     * 
+ */ public long getCompleteSequenceId() { return completeSequenceId_; } - + private void initFields() { regionSpecifier_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); stores_ = 0; @@ -4633,7 +6438,7 @@ public final class HBaseProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasRegionSpecifier()) { memoizedIsInitialized = 0; return false; @@ -4645,7 +6450,7 @@ public final class HBaseProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -4696,12 +6501,12 @@ public final class HBaseProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -4767,14 +6572,14 @@ public final class HBaseProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -4784,7 +6589,7 @@ public final class HBaseProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad) obj; - + boolean result = true; result = result && (hasRegionSpecifier() == other.hasRegionSpecifier()); if (hasRegionSpecifier()) { @@ -4865,9 +6670,13 @@ public final class HBaseProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasRegionSpecifier()) { @@ -4931,89 +6740,79 @@ public final class HBaseProtos { hash = (53 * hash) + hashLong(getCompleteSequenceId()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, 
extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code RegionLoad} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoadOrBuilder { @@ -5021,18 +6820,21 @@ public final class HBaseProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionLoad_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionLoad_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionLoad_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.Builder.class); } - + // Construct using 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -5044,7 +6846,7 @@ public final class HBaseProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (regionSpecifierBuilder_ == null) { @@ -5083,20 +6885,20 @@ public final class HBaseProtos { bitField0_ = (bitField0_ & ~0x00004000); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionLoad_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad build() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad result = buildPartial(); if (!result.isInitialized()) { @@ -5104,17 +6906,7 @@ public final class HBaseProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad buildPartial() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad(this); int from_bitField0_ = bitField0_; @@ -5187,7 +6979,7 @@ public final class HBaseProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad)other); @@ -5196,7 +6988,7 @@ public final class HBaseProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad other) { if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.getDefaultInstance()) return this; if (other.hasRegionSpecifier()) { @@ -5247,7 +7039,7 @@ public final class HBaseProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasRegionSpecifier()) { @@ -5259,122 +7051,47 @@ public final class HBaseProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if 
(!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegionSpecifier()) { - subBuilder.mergeFrom(getRegionSpecifier()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegionSpecifier(subBuilder.buildPartial()); - break; - } - case 16: { - bitField0_ |= 0x00000002; - stores_ = input.readUInt32(); - break; - } - case 24: { - bitField0_ |= 0x00000004; - storefiles_ = input.readUInt32(); - break; - } - case 32: { - bitField0_ |= 0x00000008; - storeUncompressedSizeMB_ = input.readUInt32(); - break; - } - case 40: { - bitField0_ |= 0x00000010; - storefileSizeMB_ = input.readUInt32(); - break; - } - case 48: { - bitField0_ |= 0x00000020; - memstoreSizeMB_ = input.readUInt32(); - break; - } - case 56: { - bitField0_ |= 0x00000040; - storefileIndexSizeMB_ = input.readUInt32(); - break; - } - case 64: { - bitField0_ |= 0x00000080; - readRequestsCount_ = input.readUInt64(); - break; - } - case 72: { - bitField0_ |= 0x00000100; - writeRequestsCount_ = input.readUInt64(); - break; - } - case 80: { - bitField0_ |= 0x00000200; - totalCompactingKVs_ = input.readUInt64(); - break; - } - case 88: { - bitField0_ |= 0x00000400; - currentCompactedKVs_ = input.readUInt64(); - break; - } - case 96: { - bitField0_ |= 0x00000800; - rootIndexSizeKB_ = input.readUInt32(); - break; - } - case 104: { - bitField0_ |= 0x00001000; - totalStaticIndexSizeKB_ = input.readUInt32(); - break; - } - case 112: { - bitField0_ |= 0x00002000; - totalStaticBloomSizeKB_ = input.readUInt32(); - break; - } - case 120: { - bitField0_ |= 0x00004000; - completeSequenceId_ = input.readUInt64(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .RegionSpecifier regionSpecifier = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier regionSpecifier_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionSpecifierBuilder_; + /** + * required .RegionSpecifier regionSpecifier = 1; + * + *
+       ** the region specifier 
+       * 
+ */ public boolean hasRegionSpecifier() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .RegionSpecifier regionSpecifier = 1; + * + *
+       ** the region specifier 
+       * 
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegionSpecifier() { if (regionSpecifierBuilder_ == null) { return regionSpecifier_; @@ -5382,6 +7099,13 @@ public final class HBaseProtos { return regionSpecifierBuilder_.getMessage(); } } + /** + * required .RegionSpecifier regionSpecifier = 1; + * + *
+       ** the region specifier 
+       * 
+ */ public Builder setRegionSpecifier(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionSpecifierBuilder_ == null) { if (value == null) { @@ -5395,6 +7119,13 @@ public final class HBaseProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier regionSpecifier = 1; + * + *
+       ** the region specifier 
+       * 
+ */ public Builder setRegionSpecifier( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { if (regionSpecifierBuilder_ == null) { @@ -5406,6 +7137,13 @@ public final class HBaseProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier regionSpecifier = 1; + * + *
+       ** the region specifier 
+       * 
+ */ public Builder mergeRegionSpecifier(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionSpecifierBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -5422,6 +7160,13 @@ public final class HBaseProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier regionSpecifier = 1; + * + *
+       ** the region specifier 
+       * 
+ */ public Builder clearRegionSpecifier() { if (regionSpecifierBuilder_ == null) { regionSpecifier_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); @@ -5432,11 +7177,25 @@ public final class HBaseProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * required .RegionSpecifier regionSpecifier = 1; + * + *
+       ** the region specifier 
+       * 
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionSpecifierBuilder() { bitField0_ |= 0x00000001; onChanged(); return getRegionSpecifierFieldBuilder().getBuilder(); } + /** + * required .RegionSpecifier regionSpecifier = 1; + * + *
+       ** the region specifier 
+       * 
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionSpecifierOrBuilder() { if (regionSpecifierBuilder_ != null) { return regionSpecifierBuilder_.getMessageOrBuilder(); @@ -5444,6 +7203,13 @@ public final class HBaseProtos { return regionSpecifier_; } } + /** + * required .RegionSpecifier regionSpecifier = 1; + * + *
+       ** the region specifier 
+       * 
+ */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionSpecifierFieldBuilder() { @@ -5457,504 +7223,1425 @@ public final class HBaseProtos { } return regionSpecifierBuilder_; } - + // optional uint32 stores = 2; private int stores_ ; + /** + * optional uint32 stores = 2; + * + *
+       ** the number of stores for the region 
+       * 
+ */ public boolean hasStores() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional uint32 stores = 2; + * + *
+       ** the number of stores for the region 
+       * 
+ */ public int getStores() { return stores_; } + /** + * optional uint32 stores = 2; + * + *
+       ** the number of stores for the region 
+       * 
+ */ public Builder setStores(int value) { bitField0_ |= 0x00000002; stores_ = value; onChanged(); return this; } + /** + * optional uint32 stores = 2; + * + *
+       ** the number of stores for the region 
+       * 
+ */ public Builder clearStores() { bitField0_ = (bitField0_ & ~0x00000002); stores_ = 0; onChanged(); return this; } - + // optional uint32 storefiles = 3; private int storefiles_ ; + /** + * optional uint32 storefiles = 3; + * + *
+       ** the number of storefiles for the region 
+       * 
+ */ public boolean hasStorefiles() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional uint32 storefiles = 3; + * + *
+       ** the number of storefiles for the region 
+       * 
+ */ public int getStorefiles() { return storefiles_; } + /** + * optional uint32 storefiles = 3; + * + *
+       ** the number of storefiles for the region 
+       * 
+ */ public Builder setStorefiles(int value) { bitField0_ |= 0x00000004; storefiles_ = value; onChanged(); return this; } + /** + * optional uint32 storefiles = 3; + * + *
+       ** the number of storefiles for the region 
+       * 
+ */ public Builder clearStorefiles() { bitField0_ = (bitField0_ & ~0x00000004); storefiles_ = 0; onChanged(); return this; } - + // optional uint32 storeUncompressedSizeMB = 4; private int storeUncompressedSizeMB_ ; + /** + * optional uint32 storeUncompressedSizeMB = 4; + * + *
+       ** the total size of the store files for the region, uncompressed, in MB 
+       * 
+ */ public boolean hasStoreUncompressedSizeMB() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * optional uint32 storeUncompressedSizeMB = 4; + * + *
+       ** the total size of the store files for the region, uncompressed, in MB 
+       * 
+ */ public int getStoreUncompressedSizeMB() { return storeUncompressedSizeMB_; } + /** + * optional uint32 storeUncompressedSizeMB = 4; + * + *
+       ** the total size of the store files for the region, uncompressed, in MB 
+       * 
+ */ public Builder setStoreUncompressedSizeMB(int value) { bitField0_ |= 0x00000008; storeUncompressedSizeMB_ = value; onChanged(); return this; } + /** + * optional uint32 storeUncompressedSizeMB = 4; + * + *
+       ** the total size of the store files for the region, uncompressed, in MB 
+       * 
+ */ public Builder clearStoreUncompressedSizeMB() { bitField0_ = (bitField0_ & ~0x00000008); storeUncompressedSizeMB_ = 0; onChanged(); return this; } - + // optional uint32 storefileSizeMB = 5; private int storefileSizeMB_ ; + /** + * optional uint32 storefileSizeMB = 5; + * + *
+       ** the current total size of the store files for the region, in MB 
+       * 
+ */ public boolean hasStorefileSizeMB() { return ((bitField0_ & 0x00000010) == 0x00000010); } + /** + * optional uint32 storefileSizeMB = 5; + * + *
+       ** the current total size of the store files for the region, in MB 
+       * 
+ */ public int getStorefileSizeMB() { return storefileSizeMB_; } + /** + * optional uint32 storefileSizeMB = 5; + * + *
+       ** the current total size of the store files for the region, in MB 
+       * 
+ */ public Builder setStorefileSizeMB(int value) { bitField0_ |= 0x00000010; storefileSizeMB_ = value; onChanged(); return this; } + /** + * optional uint32 storefileSizeMB = 5; + * + *
+       ** the current total size of the store files for the region, in MB 
+       * 
+ */ public Builder clearStorefileSizeMB() { bitField0_ = (bitField0_ & ~0x00000010); storefileSizeMB_ = 0; onChanged(); return this; } - + // optional uint32 memstoreSizeMB = 6; private int memstoreSizeMB_ ; + /** + * optional uint32 memstoreSizeMB = 6; + * + *
+       ** the current size of the memstore for the region, in MB 
+       * 
+ */ public boolean hasMemstoreSizeMB() { return ((bitField0_ & 0x00000020) == 0x00000020); } + /** + * optional uint32 memstoreSizeMB = 6; + * + *
+       ** the current size of the memstore for the region, in MB 
+       * 
+ */ public int getMemstoreSizeMB() { return memstoreSizeMB_; } + /** + * optional uint32 memstoreSizeMB = 6; + * + *
+       ** the current size of the memstore for the region, in MB 
+       * 
+ */ public Builder setMemstoreSizeMB(int value) { bitField0_ |= 0x00000020; memstoreSizeMB_ = value; onChanged(); return this; } + /** + * optional uint32 memstoreSizeMB = 6; + * + *
+       ** the current size of the memstore for the region, in MB 
+       * 
+ */ public Builder clearMemstoreSizeMB() { bitField0_ = (bitField0_ & ~0x00000020); memstoreSizeMB_ = 0; onChanged(); return this; } - + // optional uint32 storefileIndexSizeMB = 7; private int storefileIndexSizeMB_ ; + /** + * optional uint32 storefileIndexSizeMB = 7; + * + *
+       **
+       * The current total size of root-level store file indexes for the region,
+       * in MB. The same as {@link #rootIndexSizeKB} but in MB.
+       * 
+ */ public boolean hasStorefileIndexSizeMB() { return ((bitField0_ & 0x00000040) == 0x00000040); } + /** + * optional uint32 storefileIndexSizeMB = 7; + * + *
+       **
+       * The current total size of root-level store file indexes for the region,
+       * in MB. The same as {@link #rootIndexSizeKB} but in MB.
+       * 
+ */ public int getStorefileIndexSizeMB() { return storefileIndexSizeMB_; } + /** + * optional uint32 storefileIndexSizeMB = 7; + * + *
+       **
+       * The current total size of root-level store file indexes for the region,
+       * in MB. The same as {@link #rootIndexSizeKB} but in MB.
+       * 
+ */ public Builder setStorefileIndexSizeMB(int value) { bitField0_ |= 0x00000040; storefileIndexSizeMB_ = value; onChanged(); return this; } + /** + * optional uint32 storefileIndexSizeMB = 7; + * + *
+       **
+       * The current total size of root-level store file indexes for the region,
+       * in MB. The same as {@link #rootIndexSizeKB} but in MB.
+       * 
+ */ public Builder clearStorefileIndexSizeMB() { bitField0_ = (bitField0_ & ~0x00000040); storefileIndexSizeMB_ = 0; onChanged(); return this; } - + // optional uint64 readRequestsCount = 8; private long readRequestsCount_ ; + /** + * optional uint64 readRequestsCount = 8; + * + *
+       ** the current total read requests made to the region 
+       * 
+ */ public boolean hasReadRequestsCount() { return ((bitField0_ & 0x00000080) == 0x00000080); } + /** + * optional uint64 readRequestsCount = 8; + * + *
+       ** the current total read requests made to the region 
+       * 
+ */ public long getReadRequestsCount() { return readRequestsCount_; } + /** + * optional uint64 readRequestsCount = 8; + * + *
+       ** the current total read requests made to the region 
+       * 
+ */ public Builder setReadRequestsCount(long value) { bitField0_ |= 0x00000080; readRequestsCount_ = value; onChanged(); return this; } + /** + * optional uint64 readRequestsCount = 8; + * + *
+       ** the current total read requests made to the region 
+       * 
+ */ public Builder clearReadRequestsCount() { bitField0_ = (bitField0_ & ~0x00000080); readRequestsCount_ = 0L; onChanged(); return this; } - + // optional uint64 writeRequestsCount = 9; private long writeRequestsCount_ ; + /** + * optional uint64 writeRequestsCount = 9; + * + *
+       ** the current total write requests made to the region 
+       * 
+ */ public boolean hasWriteRequestsCount() { return ((bitField0_ & 0x00000100) == 0x00000100); } + /** + * optional uint64 writeRequestsCount = 9; + * + *
+       ** the current total write requests made to the region 
+       * 
+ */ public long getWriteRequestsCount() { return writeRequestsCount_; } + /** + * optional uint64 writeRequestsCount = 9; + * + *
+       ** the current total write requests made to the region 
+       * 
+ */ public Builder setWriteRequestsCount(long value) { bitField0_ |= 0x00000100; writeRequestsCount_ = value; onChanged(); return this; } + /** + * optional uint64 writeRequestsCount = 9; + * + *
+       ** the current total write requests made to the region 
+       * 
+ */ public Builder clearWriteRequestsCount() { bitField0_ = (bitField0_ & ~0x00000100); writeRequestsCount_ = 0L; onChanged(); return this; } - + // optional uint64 totalCompactingKVs = 10; private long totalCompactingKVs_ ; + /** + * optional uint64 totalCompactingKVs = 10; + * + *
+       ** the total compacting key values in the currently running compaction 
+       * 
+ */ public boolean hasTotalCompactingKVs() { return ((bitField0_ & 0x00000200) == 0x00000200); } + /** + * optional uint64 totalCompactingKVs = 10; + * + *
+       ** the total compacting key values in the currently running compaction 
+       * 
+ */ public long getTotalCompactingKVs() { return totalCompactingKVs_; } + /** + * optional uint64 totalCompactingKVs = 10; + * + *
+       ** the total compacting key values in the currently running compaction 
+       * 
+ */ public Builder setTotalCompactingKVs(long value) { bitField0_ |= 0x00000200; totalCompactingKVs_ = value; onChanged(); return this; } + /** + * optional uint64 totalCompactingKVs = 10; + * + *
+       ** the total compacting key values in the currently running compaction 
+       * 
+ */ public Builder clearTotalCompactingKVs() { bitField0_ = (bitField0_ & ~0x00000200); totalCompactingKVs_ = 0L; onChanged(); return this; } - + // optional uint64 currentCompactedKVs = 11; private long currentCompactedKVs_ ; + /** + * optional uint64 currentCompactedKVs = 11; + * + *
+       ** the completed count of key values in the currently running compaction 
+       * 
+ */ public boolean hasCurrentCompactedKVs() { return ((bitField0_ & 0x00000400) == 0x00000400); } + /** + * optional uint64 currentCompactedKVs = 11; + * + *
+       ** the completed count of key values in the currently running compaction 
+       * 
+ */ public long getCurrentCompactedKVs() { return currentCompactedKVs_; } + /** + * optional uint64 currentCompactedKVs = 11; + * + *
+       ** the completed count of key values in the currently running compaction 
+       * 
+ */ public Builder setCurrentCompactedKVs(long value) { bitField0_ |= 0x00000400; currentCompactedKVs_ = value; onChanged(); return this; } + /** + * optional uint64 currentCompactedKVs = 11; + * + *
+       ** the completed count of key values in the currently running compaction 
+       * 
+ */ public Builder clearCurrentCompactedKVs() { bitField0_ = (bitField0_ & ~0x00000400); currentCompactedKVs_ = 0L; onChanged(); return this; } - + // optional uint32 rootIndexSizeKB = 12; private int rootIndexSizeKB_ ; + /** + * optional uint32 rootIndexSizeKB = 12; + * + *
+       ** The current total size of root-level indexes for the region, in KB. 
+       * 
+ */ public boolean hasRootIndexSizeKB() { return ((bitField0_ & 0x00000800) == 0x00000800); } + /** + * optional uint32 rootIndexSizeKB = 12; + * + *
+       ** The current total size of root-level indexes for the region, in KB. 
+       * 
+ */ public int getRootIndexSizeKB() { return rootIndexSizeKB_; } + /** + * optional uint32 rootIndexSizeKB = 12; + * + *
+       ** The current total size of root-level indexes for the region, in KB. 
+       * 
+ */ public Builder setRootIndexSizeKB(int value) { bitField0_ |= 0x00000800; rootIndexSizeKB_ = value; onChanged(); return this; } + /** + * optional uint32 rootIndexSizeKB = 12; + * + *
+       ** The current total size of root-level indexes for the region, in KB. 
+       * 
+ */ public Builder clearRootIndexSizeKB() { bitField0_ = (bitField0_ & ~0x00000800); rootIndexSizeKB_ = 0; onChanged(); return this; } - + // optional uint32 totalStaticIndexSizeKB = 13; private int totalStaticIndexSizeKB_ ; + /** + * optional uint32 totalStaticIndexSizeKB = 13; + * + *
+       ** The total size of all index blocks, not just the root level, in KB. 
+       * 
+ */ public boolean hasTotalStaticIndexSizeKB() { return ((bitField0_ & 0x00001000) == 0x00001000); } + /** + * optional uint32 totalStaticIndexSizeKB = 13; + * + *
+       ** The total size of all index blocks, not just the root level, in KB. 
+       * 
+ */ public int getTotalStaticIndexSizeKB() { return totalStaticIndexSizeKB_; } + /** + * optional uint32 totalStaticIndexSizeKB = 13; + * + *
+       ** The total size of all index blocks, not just the root level, in KB. 
+       * 
+ */ public Builder setTotalStaticIndexSizeKB(int value) { bitField0_ |= 0x00001000; totalStaticIndexSizeKB_ = value; onChanged(); return this; } + /** + * optional uint32 totalStaticIndexSizeKB = 13; + * + *
+       ** The total size of all index blocks, not just the root level, in KB. 
+       * 
+ */ public Builder clearTotalStaticIndexSizeKB() { bitField0_ = (bitField0_ & ~0x00001000); totalStaticIndexSizeKB_ = 0; onChanged(); return this; } - + // optional uint32 totalStaticBloomSizeKB = 14; private int totalStaticBloomSizeKB_ ; + /** + * optional uint32 totalStaticBloomSizeKB = 14; + * + *
+       **
+       * The total size of all Bloom filter blocks, not just loaded into the
+       * block cache, in KB.
+       * 
+ */ public boolean hasTotalStaticBloomSizeKB() { return ((bitField0_ & 0x00002000) == 0x00002000); } + /** + * optional uint32 totalStaticBloomSizeKB = 14; + * + *
+       **
+       * The total size of all Bloom filter blocks, not just loaded into the
+       * block cache, in KB.
+       * 
+ */ public int getTotalStaticBloomSizeKB() { return totalStaticBloomSizeKB_; } + /** + * optional uint32 totalStaticBloomSizeKB = 14; + * + *
+       **
+       * The total size of all Bloom filter blocks, not just loaded into the
+       * block cache, in KB.
+       * 
+ */ public Builder setTotalStaticBloomSizeKB(int value) { bitField0_ |= 0x00002000; totalStaticBloomSizeKB_ = value; onChanged(); return this; } + /** + * optional uint32 totalStaticBloomSizeKB = 14; + * + *
+       **
+       * The total size of all Bloom filter blocks, not just loaded into the
+       * block cache, in KB.
+       * 
+ */ public Builder clearTotalStaticBloomSizeKB() { bitField0_ = (bitField0_ & ~0x00002000); totalStaticBloomSizeKB_ = 0; onChanged(); return this; } - + // optional uint64 completeSequenceId = 15; private long completeSequenceId_ ; + /** + * optional uint64 completeSequenceId = 15; + * + *
+       ** the most recent sequence Id from a cache flush 
+       * 
+ */ public boolean hasCompleteSequenceId() { return ((bitField0_ & 0x00004000) == 0x00004000); } + /** + * optional uint64 completeSequenceId = 15; + * + *
+       ** the most recent sequence Id from a cache flush 
+       * 
+ */ public long getCompleteSequenceId() { return completeSequenceId_; } + /** + * optional uint64 completeSequenceId = 15; + * + *
+       ** the most recent sequence Id from a cache flush 
+       * 
+ */ public Builder setCompleteSequenceId(long value) { bitField0_ |= 0x00004000; completeSequenceId_ = value; onChanged(); return this; } + /** + * optional uint64 completeSequenceId = 15; + * + *
+       ** the most recent sequence Id from a cache flush 
+       * 
+ */ public Builder clearCompleteSequenceId() { bitField0_ = (bitField0_ & ~0x00004000); completeSequenceId_ = 0L; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:RegionLoad) } - + static { defaultInstance = new RegionLoad(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:RegionLoad) } - + public interface ServerLoadOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // optional uint32 numberOfRequests = 1; + /** + * optional uint32 numberOfRequests = 1; + * + *
+     ** Number of requests since last report. 
+     * 
+ */ boolean hasNumberOfRequests(); + /** + * optional uint32 numberOfRequests = 1; + * + *
+     ** Number of requests since last report. 
+     * 
+ */ int getNumberOfRequests(); - + // optional uint32 totalNumberOfRequests = 2; + /** + * optional uint32 totalNumberOfRequests = 2; + * + *
+     ** Total number of requests from the start of the region server. 
+     * 
+ */ boolean hasTotalNumberOfRequests(); + /** + * optional uint32 totalNumberOfRequests = 2; + * + *
+     ** Total number of requests from the start of the region server. 
+     * 
+ */ int getTotalNumberOfRequests(); - + // optional uint32 usedHeapMB = 3; + /** + * optional uint32 usedHeapMB = 3; + * + *
+     ** the amount of used heap, in MB. 
+     * 
+ */ boolean hasUsedHeapMB(); + /** + * optional uint32 usedHeapMB = 3; + * + *
+     ** the amount of used heap, in MB. 
+     * 
+ */ int getUsedHeapMB(); - + // optional uint32 maxHeapMB = 4; + /** + * optional uint32 maxHeapMB = 4; + * + *
+     ** the maximum allowable size of the heap, in MB. 
+     * 
+ */ boolean hasMaxHeapMB(); + /** + * optional uint32 maxHeapMB = 4; + * + *
+     ** the maximum allowable size of the heap, in MB. 
+     * 
+ */ int getMaxHeapMB(); - + // repeated .RegionLoad regionLoads = 5; + /** + * repeated .RegionLoad regionLoads = 5; + * + *
+     ** Information on the load of individual regions. 
+     * 
+ */ java.util.List getRegionLoadsList(); + /** + * repeated .RegionLoad regionLoads = 5; + * + *
+     ** Information on the load of individual regions. 
+     * 
+ */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad getRegionLoads(int index); + /** + * repeated .RegionLoad regionLoads = 5; + * + *
+     ** Information on the load of individual regions. 
+     * 
+ */ int getRegionLoadsCount(); + /** + * repeated .RegionLoad regionLoads = 5; + * + *
+     ** Information on the load of individual regions. 
+     * 
+ */ java.util.List getRegionLoadsOrBuilderList(); + /** + * repeated .RegionLoad regionLoads = 5; + * + *
+     ** Information on the load of individual regions. 
+     * 
+ */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoadOrBuilder getRegionLoadsOrBuilder( int index); - + // repeated .Coprocessor coprocessors = 6; + /** + * repeated .Coprocessor coprocessors = 6; + * + *
+     **
+     * Regionserver-level coprocessors, e.g., WALObserver implementations.
+     * Region-level coprocessors, on the other hand, are stored inside RegionLoad
+     * objects.
+     * 
+ */ java.util.List getCoprocessorsList(); + /** + * repeated .Coprocessor coprocessors = 6; + * + *
+     **
+     * Regionserver-level coprocessors, e.g., WALObserver implementations.
+     * Region-level coprocessors, on the other hand, are stored inside RegionLoad
+     * objects.
+     * 
+ */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor getCoprocessors(int index); + /** + * repeated .Coprocessor coprocessors = 6; + * + *
+     **
+     * Regionserver-level coprocessors, e.g., WALObserver implementations.
+     * Region-level coprocessors, on the other hand, are stored inside RegionLoad
+     * objects.
+     * 
+ */ int getCoprocessorsCount(); + /** + * repeated .Coprocessor coprocessors = 6; + * + *
+     **
+     * Regionserver-level coprocessors, e.g., WALObserver implementations.
+     * Region-level coprocessors, on the other hand, are stored inside RegionLoad
+     * objects.
+     * 
+ */ java.util.List getCoprocessorsOrBuilderList(); + /** + * repeated .Coprocessor coprocessors = 6; + * + *
+     **
+     * Regionserver-level coprocessors, e.g., WALObserver implementations.
+     * Region-level coprocessors, on the other hand, are stored inside RegionLoad
+     * objects.
+     * 
+ */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder getCoprocessorsOrBuilder( int index); - + // optional uint64 reportStartTime = 7; + /** + * optional uint64 reportStartTime = 7; + * + *
+     **
+     * Time when incremental (non-total) counts began being calculated (e.g., numberOfRequests).
+     * Time is measured as the difference, in milliseconds, between the current time
+     * and midnight, January 1, 1970 UTC.
+     * 
+ */ boolean hasReportStartTime(); + /** + * optional uint64 reportStartTime = 7; + * + *
+     **
+     * Time when incremental (non-total) counts began being calculated (e.g., numberOfRequests).
+     * Time is measured as the difference, in milliseconds, between the current time
+     * and midnight, January 1, 1970 UTC.
+     * 
+ */ long getReportStartTime(); - + // optional uint64 reportEndTime = 8; + /** + * optional uint64 reportEndTime = 8; + * + *
+     **
+     * Time when report was generated.
+     * Time is measured as the difference, in milliseconds, between the current time
+     * and midnight, January 1, 1970 UTC.
+     * 
+ */ boolean hasReportEndTime(); + /** + * optional uint64 reportEndTime = 8; + * + *
+     **
+     * Time when report was generated.
+     * Time is measured as the difference, in milliseconds, between the current time
+     * and midnight, January 1, 1970 UTC.
+     * 
+ */ long getReportEndTime(); - + // optional uint32 infoServerPort = 9; + /** + * optional uint32 infoServerPort = 9; + * + *
+     **
+     * The port number that this region server is hosting an info server on.
+     * 
+ */ boolean hasInfoServerPort(); + /** + * optional uint32 infoServerPort = 9; + * + *
+     **
+     * The port number that this region server is hosting an info server on.
+     * 
+ */ int getInfoServerPort(); } + /** + * Protobuf type {@code ServerLoad} + */ public static final class ServerLoad extends com.google.protobuf.GeneratedMessage implements ServerLoadOrBuilder { // Use ServerLoad.newBuilder() to construct. - private ServerLoad(Builder builder) { + private ServerLoad(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private ServerLoad(boolean noInit) {} - + private ServerLoad(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final ServerLoad defaultInstance; public static ServerLoad getDefaultInstance() { return defaultInstance; } - + public ServerLoad getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ServerLoad( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + numberOfRequests_ = input.readUInt32(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + totalNumberOfRequests_ = input.readUInt32(); + break; + } + case 24: { + bitField0_ |= 0x00000004; + usedHeapMB_ = input.readUInt32(); + break; + } + case 32: { + bitField0_ |= 0x00000008; + maxHeapMB_ = input.readUInt32(); + break; + } + case 42: { + if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) { + regionLoads_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000010; + } + regionLoads_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.PARSER, extensionRegistry)); + break; + } + case 50: { + if (!((mutable_bitField0_ & 0x00000020) == 0x00000020)) { + coprocessors_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000020; + } + coprocessors_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.PARSER, extensionRegistry)); + break; + } + case 56: { + bitField0_ |= 0x00000010; + reportStartTime_ = input.readUInt64(); + break; + } + case 64: { + bitField0_ |= 0x00000020; + reportEndTime_ = input.readUInt64(); + break; + } + case 72: { + bitField0_ |= 0x00000040; + infoServerPort_ = input.readUInt32(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000010) == 0x00000010)) { + regionLoads_ = java.util.Collections.unmodifiableList(regionLoads_); + } + if (((mutable_bitField0_ & 0x00000020) == 0x00000020)) { + coprocessors_ = java.util.Collections.unmodifiableList(coprocessors_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor 
getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ServerLoad_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ServerLoad_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ServerLoad_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public ServerLoad parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ServerLoad(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // optional uint32 numberOfRequests = 1; public static final int NUMBEROFREQUESTS_FIELD_NUMBER = 1; private int numberOfRequests_; + /** + * optional uint32 numberOfRequests = 1; + * + *
+     ** Number of requests since last report. 
+     * 
+ */ public boolean hasNumberOfRequests() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional uint32 numberOfRequests = 1; + * + *
+     ** Number of requests since last report. 
+     * 
+ */ public int getNumberOfRequests() { return numberOfRequests_; } - + // optional uint32 totalNumberOfRequests = 2; public static final int TOTALNUMBEROFREQUESTS_FIELD_NUMBER = 2; private int totalNumberOfRequests_; + /** + * optional uint32 totalNumberOfRequests = 2; + * + *
+     ** Total number of requests from the start of the region server. 
+     * 
+ */ public boolean hasTotalNumberOfRequests() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional uint32 totalNumberOfRequests = 2; + * + *
+     ** Total number of requests from the start of the region server. 
+     * 
+ */ public int getTotalNumberOfRequests() { return totalNumberOfRequests_; } - + // optional uint32 usedHeapMB = 3; public static final int USEDHEAPMB_FIELD_NUMBER = 3; private int usedHeapMB_; + /** + * optional uint32 usedHeapMB = 3; + * + *
+     ** the amount of used heap, in MB. 
+     * 
+ */ public boolean hasUsedHeapMB() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional uint32 usedHeapMB = 3; + * + *
+     ** the amount of used heap, in MB. 
+     * 
+ */ public int getUsedHeapMB() { return usedHeapMB_; } - + // optional uint32 maxHeapMB = 4; public static final int MAXHEAPMB_FIELD_NUMBER = 4; private int maxHeapMB_; + /** + * optional uint32 maxHeapMB = 4; + * + *
+     ** the maximum allowable size of the heap, in MB. 
+     * 
+ */ public boolean hasMaxHeapMB() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * optional uint32 maxHeapMB = 4; + * + *
+     ** the maximum allowable size of the heap, in MB. 
+     * 
+ */ public int getMaxHeapMB() { return maxHeapMB_; } - + // repeated .RegionLoad regionLoads = 5; public static final int REGIONLOADS_FIELD_NUMBER = 5; private java.util.List regionLoads_; + /** + * repeated .RegionLoad regionLoads = 5; + * + *
+     ** Information on the load of individual regions. 
+     * 
+ */ public java.util.List getRegionLoadsList() { return regionLoads_; } + /** + * repeated .RegionLoad regionLoads = 5; + * + *
+     ** Information on the load of individual regions. 
+     * 
+ */ public java.util.List getRegionLoadsOrBuilderList() { return regionLoads_; } + /** + * repeated .RegionLoad regionLoads = 5; + * + *
+     ** Information on the load of individual regions. 
+     * 
+ */ public int getRegionLoadsCount() { return regionLoads_.size(); } + /** + * repeated .RegionLoad regionLoads = 5; + * + *
+     ** Information on the load of individual regions. 
+     * 
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad getRegionLoads(int index) { return regionLoads_.get(index); } + /** + * repeated .RegionLoad regionLoads = 5; + * + *
+     ** Information on the load of individual regions. 
+     * 
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoadOrBuilder getRegionLoadsOrBuilder( int index) { return regionLoads_.get(index); } - + // repeated .Coprocessor coprocessors = 6; public static final int COPROCESSORS_FIELD_NUMBER = 6; private java.util.List coprocessors_; + /** + * repeated .Coprocessor coprocessors = 6; + * + *
+     **
+     * Regionserver-level coprocessors, e.g., WALObserver implementations.
+     * Region-level coprocessors, on the other hand, are stored inside RegionLoad
+     * objects.
+     * 
+ */ public java.util.List getCoprocessorsList() { return coprocessors_; } + /** + * repeated .Coprocessor coprocessors = 6; + * + *
+     **
+     * Regionserver-level coprocessors, e.g., WALObserver implementations.
+     * Region-level coprocessors, on the other hand, are stored inside RegionLoad
+     * objects.
+     * 
+ */ public java.util.List getCoprocessorsOrBuilderList() { return coprocessors_; } + /** + * repeated .Coprocessor coprocessors = 6; + * + *
+     **
+     * Regionserver-level coprocessors, e.g., WALObserver implementations.
+     * Region-level coprocessors, on the other hand, are stored inside RegionLoad
+     * objects.
+     * 
+ */ public int getCoprocessorsCount() { return coprocessors_.size(); } + /** + * repeated .Coprocessor coprocessors = 6; + * + *
+     **
+     * Regionserver-level coprocessors, e.g., WALObserver implementations.
+     * Region-level coprocessors, on the other hand, are stored inside RegionLoad
+     * objects.
+     * 
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor getCoprocessors(int index) { return coprocessors_.get(index); } + /** + * repeated .Coprocessor coprocessors = 6; + * + *
+     **
+     * Regionserver-level coprocessors, e.g., WALObserver implementations.
+     * Region-level coprocessors, on the other hand, are stored inside RegionLoad
+     * objects.
+     * 
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder getCoprocessorsOrBuilder( int index) { return coprocessors_.get(index); } - + // optional uint64 reportStartTime = 7; public static final int REPORTSTARTTIME_FIELD_NUMBER = 7; private long reportStartTime_; + /** + * optional uint64 reportStartTime = 7; + * + *
+     **
+     * Time when incremental (non-total) counts began being calculated (e.g., numberOfRequests).
+     * Time is measured as the difference, in milliseconds, between the current time
+     * and midnight, January 1, 1970 UTC.
+     * 
+ */ public boolean hasReportStartTime() { return ((bitField0_ & 0x00000010) == 0x00000010); } + /** + * optional uint64 reportStartTime = 7; + * + *
+     **
+     * Time when incremental (non-total) counts began being calculated (e.g., numberOfRequests).
+     * Time is measured as the difference, in milliseconds, between the current time
+     * and midnight, January 1, 1970 UTC.
+     * 
+ */ public long getReportStartTime() { return reportStartTime_; } - + // optional uint64 reportEndTime = 8; public static final int REPORTENDTIME_FIELD_NUMBER = 8; private long reportEndTime_; + /** + * optional uint64 reportEndTime = 8; + * + *
+     **
+     * Time when report was generated.
+     * Time is measured as the difference, in milliseconds, between the current time
+     * and midnight, January 1, 1970 UTC.
+     * 
+ */ public boolean hasReportEndTime() { return ((bitField0_ & 0x00000020) == 0x00000020); } + /** + * optional uint64 reportEndTime = 8; + * + *
+     **
+     * Time when report was generated.
+     * Time is measured as the difference, in milliseconds, between the current time
+     * and midnight, January 1, 1970 UTC.
+     * 
+ */ public long getReportEndTime() { return reportEndTime_; } - + // optional uint32 infoServerPort = 9; public static final int INFOSERVERPORT_FIELD_NUMBER = 9; private int infoServerPort_; + /** + * optional uint32 infoServerPort = 9; + * + *
+     **
+     * The port number that this region server is hosting an info server on.
+     * 
+ */ public boolean hasInfoServerPort() { return ((bitField0_ & 0x00000040) == 0x00000040); } + /** + * optional uint32 infoServerPort = 9; + * + *
+     **
+     * The port number that this region server is hosting an info server on.
+     * 
+ */ public int getInfoServerPort() { return infoServerPort_; } - + private void initFields() { numberOfRequests_ = 0; totalNumberOfRequests_ = 0; @@ -5970,7 +8657,7 @@ public final class HBaseProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + for (int i = 0; i < getRegionLoadsCount(); i++) { if (!getRegionLoads(i).isInitialized()) { memoizedIsInitialized = 0; @@ -5986,7 +8673,7 @@ public final class HBaseProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -6019,12 +8706,12 @@ public final class HBaseProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -6066,14 +8753,14 @@ public final class HBaseProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -6083,7 +8770,7 @@ public final class HBaseProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad) obj; - + boolean result = true; result = result && (hasNumberOfRequests() == other.hasNumberOfRequests()); if (hasNumberOfRequests()) { @@ -6128,9 +8815,13 @@ public final class HBaseProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasNumberOfRequests()) { @@ -6170,89 +8861,79 @@ public final class HBaseProtos { hash = (53 * hash) + getInfoServerPort(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return 
PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code ServerLoad} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoadOrBuilder { @@ -6260,18 +8941,21 @@ public final class HBaseProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ServerLoad_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ServerLoad_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ServerLoad_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.newBuilder() 
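      // Editor's note, not generated code: the hunks above replace the old
      // protobuf 2.4 parse idiom with the 2.5 PARSER API. A minimal
      // caller-side sketch, assuming `bytes` holds a serialized ServerLoad:
      //
      //   // old (2.4): allocated a Builder per parse
      //   ServerLoad viaBuilder = ServerLoad.newBuilder().mergeFrom(bytes).build();
      //   // new (2.5): shared, immutable parser; the static helpers remain
      //   ServerLoad viaParser = ServerLoad.PARSER.parseFrom(bytes);
      //
      // Builder.mergeFrom(CodedInputStream, ...) below now delegates to
      // PARSER.parsePartialFrom and, when parsing fails midway, merges
      // e.getUnfinishedMessage() before rethrowing, so fields read before
      // the failure are preserved in the builder.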
private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -6284,7 +8968,7 @@ public final class HBaseProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); numberOfRequests_ = 0; @@ -6315,20 +8999,20 @@ public final class HBaseProtos { bitField0_ = (bitField0_ & ~0x00000100); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ServerLoad_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad build() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad result = buildPartial(); if (!result.isInitialized()) { @@ -6336,17 +9020,7 @@ public final class HBaseProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad buildPartial() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad(this); int from_bitField0_ = bitField0_; @@ -6401,7 +9075,7 @@ public final class HBaseProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad)other); @@ -6410,7 +9084,7 @@ public final class HBaseProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad other) { if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.getDefaultInstance()) return this; if (other.hasNumberOfRequests()) { @@ -6489,7 +9163,7 @@ public final class HBaseProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { for (int i = 0; i < getRegionLoadsCount(); i++) { if (!getRegionLoads(i).isInitialized()) { @@ -6505,167 +9179,222 @@ public final class HBaseProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - 
extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - numberOfRequests_ = input.readUInt32(); - break; - } - case 16: { - bitField0_ |= 0x00000002; - totalNumberOfRequests_ = input.readUInt32(); - break; - } - case 24: { - bitField0_ |= 0x00000004; - usedHeapMB_ = input.readUInt32(); - break; - } - case 32: { - bitField0_ |= 0x00000008; - maxHeapMB_ = input.readUInt32(); - break; - } - case 42: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addRegionLoads(subBuilder.buildPartial()); - break; - } - case 50: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addCoprocessors(subBuilder.buildPartial()); - break; - } - case 56: { - bitField0_ |= 0x00000040; - reportStartTime_ = input.readUInt64(); - break; - } - case 64: { - bitField0_ |= 0x00000080; - reportEndTime_ = input.readUInt64(); - break; - } - case 72: { - bitField0_ |= 0x00000100; - infoServerPort_ = input.readUInt32(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // optional uint32 numberOfRequests = 1; private int numberOfRequests_ ; + /** + * optional uint32 numberOfRequests = 1; + * + *
+       ** Number of requests since last report. 
+       * 
+ */ public boolean hasNumberOfRequests() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional uint32 numberOfRequests = 1; + * + *
+       ** Number of requests since last report. 
+       * 
+ */ public int getNumberOfRequests() { return numberOfRequests_; } + /** + * optional uint32 numberOfRequests = 1; + * + *
+       ** Number of requests since last report. 
+       * 
+ */ public Builder setNumberOfRequests(int value) { bitField0_ |= 0x00000001; numberOfRequests_ = value; onChanged(); return this; } + /** + * optional uint32 numberOfRequests = 1; + * + *
+       ** Number of requests since last report. 
+       * 
+ */ public Builder clearNumberOfRequests() { bitField0_ = (bitField0_ & ~0x00000001); numberOfRequests_ = 0; onChanged(); return this; } - + // optional uint32 totalNumberOfRequests = 2; private int totalNumberOfRequests_ ; + /** + * optional uint32 totalNumberOfRequests = 2; + * + *
+       ** Total number of requests from the start of the region server. 
+       * 
+ */ public boolean hasTotalNumberOfRequests() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional uint32 totalNumberOfRequests = 2; + * + *
+       ** Total number of requests from the start of the region server. 
+       * 
+ */ public int getTotalNumberOfRequests() { return totalNumberOfRequests_; } + /** + * optional uint32 totalNumberOfRequests = 2; + * + *
+       ** Total number of requests from the start of the region server. 
+       * 
+ */ public Builder setTotalNumberOfRequests(int value) { bitField0_ |= 0x00000002; totalNumberOfRequests_ = value; onChanged(); return this; } + /** + * optional uint32 totalNumberOfRequests = 2; + * + *
+       ** Total number of requests from the start of the region server. 
+       * 
+ */ public Builder clearTotalNumberOfRequests() { bitField0_ = (bitField0_ & ~0x00000002); totalNumberOfRequests_ = 0; onChanged(); return this; } - + // optional uint32 usedHeapMB = 3; private int usedHeapMB_ ; + /** + * optional uint32 usedHeapMB = 3; + * + *
+       ** The amount of used heap, in MB. 
+       * 
+ */ public boolean hasUsedHeapMB() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional uint32 usedHeapMB = 3; + * + *
+       ** The amount of used heap, in MB. 
+       * 
+ */ public int getUsedHeapMB() { return usedHeapMB_; } + /** + * optional uint32 usedHeapMB = 3; + * + *
+       ** The amount of used heap, in MB. 
+       * 
+ */ public Builder setUsedHeapMB(int value) { bitField0_ |= 0x00000004; usedHeapMB_ = value; onChanged(); return this; } + /** + * optional uint32 usedHeapMB = 3; + * + *
+       ** The amount of used heap, in MB. 
+       * 
+ */ public Builder clearUsedHeapMB() { bitField0_ = (bitField0_ & ~0x00000004); usedHeapMB_ = 0; onChanged(); return this; } - + // optional uint32 maxHeapMB = 4; private int maxHeapMB_ ; + /** + * optional uint32 maxHeapMB = 4; + * + *
+       ** The maximum allowable size of the heap, in MB. 
+       * 
+ */ public boolean hasMaxHeapMB() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * optional uint32 maxHeapMB = 4; + * + *
+       ** The maximum allowable size of the heap, in MB. 
+       * 
+ */ public int getMaxHeapMB() { return maxHeapMB_; } + /** + * optional uint32 maxHeapMB = 4; + * + *
+       ** The maximum allowable size of the heap, in MB. 
+       * 
+ */ public Builder setMaxHeapMB(int value) { bitField0_ |= 0x00000008; maxHeapMB_ = value; onChanged(); return this; } + /** + * optional uint32 maxHeapMB = 4; + * + *
+       ** The maximum allowable size of the heap, in MB. 
+       * 
+ */ public Builder clearMaxHeapMB() { bitField0_ = (bitField0_ & ~0x00000008); maxHeapMB_ = 0; onChanged(); return this; } - + // repeated .RegionLoad regionLoads = 5; private java.util.List regionLoads_ = java.util.Collections.emptyList(); @@ -6675,10 +9404,17 @@ public final class HBaseProtos { bitField0_ |= 0x00000010; } } - + private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoadOrBuilder> regionLoadsBuilder_; - + + /** + * repeated .RegionLoad regionLoads = 5; + * + *
+       ** Information on the load of individual regions. 
+       * 
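       * (Editor's illustration, not part of the patch: a typical read pass is
       *    for (HBaseProtos.RegionLoad rl : serverLoad.getRegionLoadsList()) {
       *      // inspect per-region load here
       *    }
       *  where serverLoad is a parsed ServerLoad instance.)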
+ */ public java.util.List getRegionLoadsList() { if (regionLoadsBuilder_ == null) { return java.util.Collections.unmodifiableList(regionLoads_); @@ -6686,6 +9422,13 @@ public final class HBaseProtos { return regionLoadsBuilder_.getMessageList(); } } + /** + * repeated .RegionLoad regionLoads = 5; + * + *
+       ** Information on the load of individual regions. 
+       * 
+ */ public int getRegionLoadsCount() { if (regionLoadsBuilder_ == null) { return regionLoads_.size(); @@ -6693,6 +9436,13 @@ public final class HBaseProtos { return regionLoadsBuilder_.getCount(); } } + /** + * repeated .RegionLoad regionLoads = 5; + * + *
+       ** Information on the load of individual regions. 
+       * 
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad getRegionLoads(int index) { if (regionLoadsBuilder_ == null) { return regionLoads_.get(index); @@ -6700,6 +9450,13 @@ public final class HBaseProtos { return regionLoadsBuilder_.getMessage(index); } } + /** + * repeated .RegionLoad regionLoads = 5; + * + *
+       ** Information on the load of individual regions. 
+       * 
+ */ public Builder setRegionLoads( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad value) { if (regionLoadsBuilder_ == null) { @@ -6714,6 +9471,13 @@ public final class HBaseProtos { } return this; } + /** + * repeated .RegionLoad regionLoads = 5; + * + *
+       ** Information on the load of individual regions. 
+       * 
+ */ public Builder setRegionLoads( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.Builder builderForValue) { if (regionLoadsBuilder_ == null) { @@ -6725,6 +9489,13 @@ public final class HBaseProtos { } return this; } + /** + * repeated .RegionLoad regionLoads = 5; + * + *
+       ** Information on the load of individual regions. 
+       * 
+ */ public Builder addRegionLoads(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad value) { if (regionLoadsBuilder_ == null) { if (value == null) { @@ -6738,6 +9509,13 @@ public final class HBaseProtos { } return this; } + /** + * repeated .RegionLoad regionLoads = 5; + * + *
+       ** Information on the load of individual regions. 
+       * 
+ */ public Builder addRegionLoads( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad value) { if (regionLoadsBuilder_ == null) { @@ -6752,6 +9530,13 @@ public final class HBaseProtos { } return this; } + /** + * repeated .RegionLoad regionLoads = 5; + * + *
+       ** Information on the load of individual regions. 
+       * 
+ */ public Builder addRegionLoads( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.Builder builderForValue) { if (regionLoadsBuilder_ == null) { @@ -6763,6 +9548,13 @@ public final class HBaseProtos { } return this; } + /** + * repeated .RegionLoad regionLoads = 5; + * + *
+       ** Information on the load of individual regions. 
+       * 
+ */ public Builder addRegionLoads( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.Builder builderForValue) { if (regionLoadsBuilder_ == null) { @@ -6774,6 +9566,13 @@ public final class HBaseProtos { } return this; } + /** + * repeated .RegionLoad regionLoads = 5; + * + *
+       ** Information on the load of individual regions. 
+       * 
+ */ public Builder addAllRegionLoads( java.lang.Iterable values) { if (regionLoadsBuilder_ == null) { @@ -6785,6 +9584,13 @@ public final class HBaseProtos { } return this; } + /** + * repeated .RegionLoad regionLoads = 5; + * + *
+       ** Information on the load of individual regions. 
+       * 
+ */ public Builder clearRegionLoads() { if (regionLoadsBuilder_ == null) { regionLoads_ = java.util.Collections.emptyList(); @@ -6795,6 +9601,13 @@ public final class HBaseProtos { } return this; } + /** + * repeated .RegionLoad regionLoads = 5; + * + *
+       ** Information on the load of individual regions. 
+       * 
+ */ public Builder removeRegionLoads(int index) { if (regionLoadsBuilder_ == null) { ensureRegionLoadsIsMutable(); @@ -6805,10 +9618,24 @@ public final class HBaseProtos { } return this; } + /** + * repeated .RegionLoad regionLoads = 5; + * + *
+       ** Information on the load of individual regions. 
+       * 
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.Builder getRegionLoadsBuilder( int index) { return getRegionLoadsFieldBuilder().getBuilder(index); } + /** + * repeated .RegionLoad regionLoads = 5; + * + *
+       ** Information on the load of individual regions. 
+       * 
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoadOrBuilder getRegionLoadsOrBuilder( int index) { if (regionLoadsBuilder_ == null) { @@ -6816,6 +9643,13 @@ public final class HBaseProtos { return regionLoadsBuilder_.getMessageOrBuilder(index); } } + /** + * repeated .RegionLoad regionLoads = 5; + * + *
+       ** Information on the load of individual regions. 
+       * 
+ */ public java.util.List getRegionLoadsOrBuilderList() { if (regionLoadsBuilder_ != null) { @@ -6824,15 +9658,36 @@ public final class HBaseProtos { return java.util.Collections.unmodifiableList(regionLoads_); } } + /** + * repeated .RegionLoad regionLoads = 5; + * + *
+       ** Information on the load of individual regions. 
+       * 
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.Builder addRegionLoadsBuilder() { return getRegionLoadsFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.getDefaultInstance()); } + /** + * repeated .RegionLoad regionLoads = 5; + * + *
+       ** Information on the load of individual regions. 
+       * 
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.Builder addRegionLoadsBuilder( int index) { return getRegionLoadsFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.getDefaultInstance()); } + /** + * repeated .RegionLoad regionLoads = 5; + * + *
+       ** Information on the load of individual regions. 
+       * 
+ */ public java.util.List getRegionLoadsBuilderList() { return getRegionLoadsFieldBuilder().getBuilderList(); @@ -6851,7 +9706,7 @@ public final class HBaseProtos { } return regionLoadsBuilder_; } - + // repeated .Coprocessor coprocessors = 6; private java.util.List coprocessors_ = java.util.Collections.emptyList(); @@ -6861,10 +9716,20 @@ public final class HBaseProtos { bitField0_ |= 0x00000020; } } - + private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder> coprocessorsBuilder_; - + + /** + * repeated .Coprocessor coprocessors = 6; + * + *
+       **
+       * Regionserver-level coprocessors, e.g., WALObserver implementations.
+       * Region-level coprocessors, on the other hand, are stored inside RegionLoad
+       * objects.
+       * 
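       * (Editor's illustration, not part of the patch: a cluster-wide
       *  inventory is the union of this list with each region's list,
       *  assuming RegionLoad exposes a matching getCoprocessorsList():
       *    java.util.Set<String> names = new java.util.TreeSet<String>();
       *    for (Coprocessor c : sl.getCoprocessorsList()) names.add(c.getName());
       *    for (RegionLoad rl : sl.getRegionLoadsList())
       *      for (Coprocessor c : rl.getCoprocessorsList()) names.add(c.getName());
       *  )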
+ */ public java.util.List getCoprocessorsList() { if (coprocessorsBuilder_ == null) { return java.util.Collections.unmodifiableList(coprocessors_); @@ -6872,6 +9737,16 @@ public final class HBaseProtos { return coprocessorsBuilder_.getMessageList(); } } + /** + * repeated .Coprocessor coprocessors = 6; + * + *
+       **
+       * Regionserver-level coprocessors, e.g., WALObserver implementations.
+       * Region-level coprocessors, on the other hand, are stored inside RegionLoad
+       * objects.
+       * 
+ */ public int getCoprocessorsCount() { if (coprocessorsBuilder_ == null) { return coprocessors_.size(); @@ -6879,6 +9754,16 @@ public final class HBaseProtos { return coprocessorsBuilder_.getCount(); } } + /** + * repeated .Coprocessor coprocessors = 6; + * + *
+       **
+       * Regionserver-level coprocessors, e.g., WALObserver implementations.
+       * Region-level coprocessors, on the other hand, are stored inside RegionLoad
+       * objects.
+       * 
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor getCoprocessors(int index) { if (coprocessorsBuilder_ == null) { return coprocessors_.get(index); @@ -6886,6 +9771,16 @@ public final class HBaseProtos { return coprocessorsBuilder_.getMessage(index); } } + /** + * repeated .Coprocessor coprocessors = 6; + * + *
+       **
+       * Regionserver-level coprocessors, e.g., WALObserver implementations.
+       * Region-level coprocessors, on the other hand, are stored inside RegionLoad
+       * objects.
+       * 
+ */ public Builder setCoprocessors( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor value) { if (coprocessorsBuilder_ == null) { @@ -6900,6 +9795,16 @@ public final class HBaseProtos { } return this; } + /** + * repeated .Coprocessor coprocessors = 6; + * + *
+       **
+       * Regionserver-level coprocessors, e.g., WALObserver implementations.
+       * Region-level coprocessors, on the other hand, are stored inside RegionLoad
+       * objects.
+       * 
+ */ public Builder setCoprocessors( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder builderForValue) { if (coprocessorsBuilder_ == null) { @@ -6911,6 +9816,16 @@ public final class HBaseProtos { } return this; } + /** + * repeated .Coprocessor coprocessors = 6; + * + *
+       **
+       * Regionserver-level coprocessors, e.g., WALObserver implementations.
+       * Region-level coprocessors, on the other hand, are stored inside RegionLoad
+       * objects.
+       * 
+ */ public Builder addCoprocessors(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor value) { if (coprocessorsBuilder_ == null) { if (value == null) { @@ -6924,6 +9839,16 @@ public final class HBaseProtos { } return this; } + /** + * repeated .Coprocessor coprocessors = 6; + * + *
+       **
+       * Regionserver-level coprocessors, e.g., WALObserver implementations.
+       * Region-level coprocessors, on the other hand, are stored inside RegionLoad
+       * objects.
+       * 
+ */ public Builder addCoprocessors( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor value) { if (coprocessorsBuilder_ == null) { @@ -6938,6 +9863,16 @@ public final class HBaseProtos { } return this; } + /** + * repeated .Coprocessor coprocessors = 6; + * + *
+       **
+       * Regionserver-level coprocessors, e.g., WALObserver implementations.
+       * Region-level coprocessors, on the other hand, are stored inside RegionLoad
+       * objects.
+       * 
+ */ public Builder addCoprocessors( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder builderForValue) { if (coprocessorsBuilder_ == null) { @@ -6949,6 +9884,16 @@ public final class HBaseProtos { } return this; } + /** + * repeated .Coprocessor coprocessors = 6; + * + *
+       **
+       * Regionserver-level coprocessors, e.g., WALObserver implementations.
+       * Region-level coprocessors, on the other hand, are stored inside RegionLoad
+       * objects.
+       * 
+ */ public Builder addCoprocessors( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder builderForValue) { if (coprocessorsBuilder_ == null) { @@ -6960,6 +9905,16 @@ public final class HBaseProtos { } return this; } + /** + * repeated .Coprocessor coprocessors = 6; + * + *
+       **
+       * Regionserver-level coprocessors, e.g., WALObserver implementations.
+       * Region-level coprocessors, on the other hand, are stored inside RegionLoad
+       * objects.
+       * 
+ */ public Builder addAllCoprocessors( java.lang.Iterable values) { if (coprocessorsBuilder_ == null) { @@ -6971,6 +9926,16 @@ public final class HBaseProtos { } return this; } + /** + * repeated .Coprocessor coprocessors = 6; + * + *
+       **
+       * Regionserver-level coprocessors, e.g., WALObserver implementations.
+       * Region-level coprocessors, on the other hand, are stored inside RegionLoad
+       * objects.
+       * 
+ */ public Builder clearCoprocessors() { if (coprocessorsBuilder_ == null) { coprocessors_ = java.util.Collections.emptyList(); @@ -6981,6 +9946,16 @@ public final class HBaseProtos { } return this; } + /** + * repeated .Coprocessor coprocessors = 6; + * + *
+       **
+       * Regionserver-level coprocessors, e.g., WALObserver implementations.
+       * Region-level coprocessors, on the other hand, are stored inside RegionLoad
+       * objects.
+       * 
+ */ public Builder removeCoprocessors(int index) { if (coprocessorsBuilder_ == null) { ensureCoprocessorsIsMutable(); @@ -6991,10 +9966,30 @@ public final class HBaseProtos { } return this; } + /** + * repeated .Coprocessor coprocessors = 6; + * + *
+       **
+       * Regionserver-level coprocessors, e.g., WALObserver implementations.
+       * Region-level coprocessors, on the other hand, are stored inside RegionLoad
+       * objects.
+       * 
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder getCoprocessorsBuilder( int index) { return getCoprocessorsFieldBuilder().getBuilder(index); } + /** + * repeated .Coprocessor coprocessors = 6; + * + *
+       **
+       * Regionserver-level coprocessors, e.g., WALObserver implementations.
+       * Region-level coprocessors, on the other hand, are stored inside RegionLoad
+       * objects.
+       * 
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder getCoprocessorsOrBuilder( int index) { if (coprocessorsBuilder_ == null) { @@ -7002,6 +9997,16 @@ public final class HBaseProtos { return coprocessorsBuilder_.getMessageOrBuilder(index); } } + /** + * repeated .Coprocessor coprocessors = 6; + * + *
+       **
+       * Regionserver-level coprocessors, e.g., WALObserver implementations.
+       * Region-level coprocessors, on the other hand, are stored inside RegionLoad
+       * objects.
+       * 
+ */ public java.util.List getCoprocessorsOrBuilderList() { if (coprocessorsBuilder_ != null) { @@ -7010,15 +10015,45 @@ public final class HBaseProtos { return java.util.Collections.unmodifiableList(coprocessors_); } } + /** + * repeated .Coprocessor coprocessors = 6; + * + *
+       **
+       * Regionserver-level coprocessors, e.g., WALObserver implementations.
+       * Region-level coprocessors, on the other hand, are stored inside RegionLoad
+       * objects.
+       * 
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder addCoprocessorsBuilder() { return getCoprocessorsFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.getDefaultInstance()); } + /** + * repeated .Coprocessor coprocessors = 6; + * + *
+       **
+       * Regionserver-level coprocessors, e.g., WALObserver implementations.
+       * Region-level coprocessors, on the other hand, are stored inside RegionLoad
+       * objects.
+       * 
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder addCoprocessorsBuilder( int index) { return getCoprocessorsFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.getDefaultInstance()); } + /** + * repeated .Coprocessor coprocessors = 6; + * + *
+       **
+       * Regionserver-level coprocessors, e.g., WALObserver implementations.
+       * Region-level coprocessors, on the other hand, are stored inside RegionLoad
+       * objects.
+       * 
+ */ public java.util.List getCoprocessorsBuilderList() { return getCoprocessorsFieldBuilder().getBuilderList(); @@ -7037,141 +10072,357 @@ public final class HBaseProtos { } return coprocessorsBuilder_; } - + // optional uint64 reportStartTime = 7; private long reportStartTime_ ; + /** + * optional uint64 reportStartTime = 7; + * + *
+       **
+       * Time when incremental (non-total) counts began being calculated (e.g. numberOfRequests).
+       * Time is measured as the difference, in milliseconds, between the current time
+       * and midnight, January 1, 1970 UTC.
+       * 
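       * (Editor's illustration, not part of the patch: together with
       *  reportEndTime this bounds the sample window, so a request rate is
       *    double rps = 1000.0 * load.getNumberOfRequests()
       *        / (load.getReportEndTime() - load.getReportStartTime());
       *  guarding against a zero-length window before dividing.)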
+ */ public boolean hasReportStartTime() { return ((bitField0_ & 0x00000040) == 0x00000040); } + /** + * optional uint64 reportStartTime = 7; + * + *
+       **
+       * Time when incremental (non-total) counts began being calculated (e.g. numberOfRequests).
+       * Time is measured as the difference, in milliseconds, between the current time
+       * and midnight, January 1, 1970 UTC.
+       * 
+ */ public long getReportStartTime() { return reportStartTime_; } + /** + * optional uint64 reportStartTime = 7; + * + *
+       **
+       * Time when incremental (non-total) counts began being calculated (e.g. numberOfRequests).
+       * Time is measured as the difference, in milliseconds, between the current time
+       * and midnight, January 1, 1970 UTC.
+       * 
+ */ public Builder setReportStartTime(long value) { bitField0_ |= 0x00000040; reportStartTime_ = value; onChanged(); return this; } + /** + * optional uint64 reportStartTime = 7; + * + *
+       **
+       * Time when incremental (non-total) counts began being calculated (e.g. numberOfRequests).
+       * Time is measured as the difference, in milliseconds, between the current time
+       * and midnight, January 1, 1970 UTC.
+       * 
+ */ public Builder clearReportStartTime() { bitField0_ = (bitField0_ & ~0x00000040); reportStartTime_ = 0L; onChanged(); return this; } - + // optional uint64 reportEndTime = 8; private long reportEndTime_ ; + /** + * optional uint64 reportEndTime = 8; + * + *
+       **
+       * Time when report was generated.
+       * Time is measured as the difference, in milliseconds, between the current time
+       * and midnight, January 1, 1970 UTC.
+       * 
+ */ public boolean hasReportEndTime() { return ((bitField0_ & 0x00000080) == 0x00000080); } + /** + * optional uint64 reportEndTime = 8; + * + *
+       **
+       * Time when report was generated.
+       * Time is measured as the difference, in milliseconds, between the current time
+       * and midnight, January 1, 1970 UTC.
+       * 
+ */ public long getReportEndTime() { return reportEndTime_; } + /** + * optional uint64 reportEndTime = 8; + * + *
+       **
+       * Time when report was generated.
+       * Time is measured as the difference, in milliseconds, between the current time
+       * and midnight, January 1, 1970 UTC.
+       * 
+ */ public Builder setReportEndTime(long value) { bitField0_ |= 0x00000080; reportEndTime_ = value; onChanged(); return this; } + /** + * optional uint64 reportEndTime = 8; + * + *
+       **
+       * Time when report was generated.
+       * Time is measured as the difference, in milliseconds, between the current time
+       * and midnight, January 1, 1970 UTC.
+       * 
+ */ public Builder clearReportEndTime() { bitField0_ = (bitField0_ & ~0x00000080); reportEndTime_ = 0L; onChanged(); return this; } - + // optional uint32 infoServerPort = 9; private int infoServerPort_ ; + /** + * optional uint32 infoServerPort = 9; + * + *
+       **
+       * The port number that this region server is hosting an info server on.
+       * 
+ */ public boolean hasInfoServerPort() { return ((bitField0_ & 0x00000100) == 0x00000100); } + /** + * optional uint32 infoServerPort = 9; + * + *
+       **
+       * The port number that this region server is hosting an info server on.
+       * 
+ */ public int getInfoServerPort() { return infoServerPort_; } + /** + * optional uint32 infoServerPort = 9; + * + *
+       **
+       * The port number that this region server is hosting an info server on.
+       * 
+ */ public Builder setInfoServerPort(int value) { bitField0_ |= 0x00000100; infoServerPort_ = value; onChanged(); return this; } + /** + * optional uint32 infoServerPort = 9; + * + *
+       **
+       * The port number that this region server is hosting an info server on.
+       * 
+ */ public Builder clearInfoServerPort() { bitField0_ = (bitField0_ & ~0x00000100); infoServerPort_ = 0; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:ServerLoad) } - + static { defaultInstance = new ServerLoad(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:ServerLoad) } - + public interface TimeRangeOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // optional uint64 from = 1; + /** + * optional uint64 from = 1; + */ boolean hasFrom(); + /** + * optional uint64 from = 1; + */ long getFrom(); - + // optional uint64 to = 2; + /** + * optional uint64 to = 2; + */ boolean hasTo(); + /** + * optional uint64 to = 2; + */ long getTo(); } + /** + * Protobuf type {@code TimeRange} + * + *
+   **
+   * A range of time. Both from and to are Java timestamps
+   * in milliseconds. If you don't specify a time
+   * range, it means all time.  By default, if not
+   * specified, from = 0, and to = Long.MAX_VALUE
+   * 
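   * (Editor's illustration, not part of the patch: an unset range therefore
   *  covers everything, e.g.
   *    TimeRange all = TimeRange.newBuilder().build();  // from=0, to=MAX
   *    TimeRange day = TimeRange.newBuilder()
   *        .setFrom(dayStartMillis).setTo(dayEndMillis).build();
   *  where dayStartMillis and dayEndMillis are hypothetical caller values.)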
+ */ public static final class TimeRange extends com.google.protobuf.GeneratedMessage implements TimeRangeOrBuilder { // Use TimeRange.newBuilder() to construct. - private TimeRange(Builder builder) { + private TimeRange(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private TimeRange(boolean noInit) {} - + private TimeRange(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final TimeRange defaultInstance; public static TimeRange getDefaultInstance() { return defaultInstance; } - + public TimeRange getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private TimeRange( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + from_ = input.readUInt64(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + to_ = input.readUInt64(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TimeRange_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TimeRange_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TimeRange_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder.class); } - + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public TimeRange parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new TimeRange(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + private int bitField0_; // optional uint64 from = 1; public static final int FROM_FIELD_NUMBER = 1; private long from_; + /** + * optional uint64 from = 1; + */ public boolean hasFrom() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional uint64 from = 1; + */ public long getFrom() { return from_; } - + // optional uint64 to = 2; 
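    // Editor's note, not generated code: TimeRange shows the full 2.5 pattern
    // in miniature: parsing moved from Builder.mergeFrom into the private
    // message constructor, unknown fields are kept per instance, and PARSER
    // backs every static parseFrom/parseDelimitedFrom. A round-trip sketch
    // using only APIs visible in these hunks plus MessageLite.writeDelimitedTo:
    //
    //   java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
    //   TimeRange.newBuilder().setFrom(0L).setTo(42L).build().writeDelimitedTo(out);
    //   TimeRange tr = TimeRange.parseDelimitedFrom(
    //       new java.io.ByteArrayInputStream(out.toByteArray()));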
public static final int TO_FIELD_NUMBER = 2; private long to_; + /** + * optional uint64 to = 2; + */ public boolean hasTo() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional uint64 to = 2; + */ public long getTo() { return to_; } - + private void initFields() { from_ = 0L; to_ = 0L; @@ -7180,11 +10431,11 @@ public final class HBaseProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -7196,12 +10447,12 @@ public final class HBaseProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -7215,14 +10466,14 @@ public final class HBaseProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -7232,7 +10483,7 @@ public final class HBaseProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange) obj; - + boolean result = true; result = result && (hasFrom() == other.hasFrom()); if (hasFrom()) { @@ -7248,9 +10499,13 @@ public final class HBaseProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasFrom()) { @@ -7262,89 +10517,87 @@ public final class HBaseProtos { hash = (53 * hash) + hashLong(getTo()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code TimeRange} + * + *
+     **
+     * A range of time. Both from and to are Java timestamps
+     * in milliseconds. If you don't specify a time
+     * range, it means all time.  By default, if not
+     * specified, from = 0, and to = Long.MAX_VALUE
+     * 
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder { @@ -7352,18 +10605,21 @@ public final class HBaseProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TimeRange_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TimeRange_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TimeRange_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -7374,7 +10630,7 @@ public final class HBaseProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); from_ = 0L; @@ -7383,20 +10639,20 @@ public final class HBaseProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TimeRange_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange build() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange result = buildPartial(); if (!result.isInitialized()) { @@ -7404,17 +10660,7 @@ public final class HBaseProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange buildPartial() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange(this); int from_bitField0_ = bitField0_; @@ -7431,7 +10677,7 @@ public final class HBaseProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange)other); @@ -7440,7 +10686,7 @@ public final class HBaseProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange other) { if (other == 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance()) return this; if (other.hasFrom()) { @@ -7452,185 +10698,295 @@ public final class HBaseProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - from_ = input.readUInt64(); - break; - } - case 16: { - bitField0_ |= 0x00000002; - to_ = input.readUInt64(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // optional uint64 from = 1; private long from_ ; + /** + * optional uint64 from = 1; + */ public boolean hasFrom() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional uint64 from = 1; + */ public long getFrom() { return from_; } + /** + * optional uint64 from = 1; + */ public Builder setFrom(long value) { bitField0_ |= 0x00000001; from_ = value; onChanged(); return this; } + /** + * optional uint64 from = 1; + */ public Builder clearFrom() { bitField0_ = (bitField0_ & ~0x00000001); from_ = 0L; onChanged(); return this; } - + // optional uint64 to = 2; private long to_ ; + /** + * optional uint64 to = 2; + */ public boolean hasTo() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional uint64 to = 2; + */ public long getTo() { return to_; } + /** + * optional uint64 to = 2; + */ public Builder setTo(long value) { bitField0_ |= 0x00000002; to_ = value; onChanged(); return this; } + /** + * optional uint64 to = 2; + */ public Builder clearTo() { bitField0_ = (bitField0_ & ~0x00000002); to_ = 0L; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:TimeRange) } - + static { defaultInstance = new TimeRange(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:TimeRange) } - + public interface FilterOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required string name = 1; + /** + * required string name = 1; + */ boolean hasName(); - String getName(); - + /** + * required string name = 1; + */ + java.lang.String getName(); + /** + * required string name = 1; + */ + com.google.protobuf.ByteString + getNameBytes(); + // optional bytes serializedFilter = 2; + /** + * optional bytes serializedFilter = 2; + */ boolean hasSerializedFilter(); + /** + * optional bytes serializedFilter = 2; + */ com.google.protobuf.ByteString getSerializedFilter(); } + /** + * Protobuf type {@code Filter} + */ public static final class Filter extends 
com.google.protobuf.GeneratedMessage implements FilterOrBuilder { // Use Filter.newBuilder() to construct. - private Filter(Builder builder) { + private Filter(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private Filter(boolean noInit) {} - + private Filter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final Filter defaultInstance; public static Filter getDefaultInstance() { return defaultInstance; } - + public Filter getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private Filter( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + name_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + serializedFilter_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_Filter_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_Filter_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_Filter_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public Filter parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new Filter(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required string name = 1; public static final int NAME_FIELD_NUMBER = 1; private java.lang.Object name_; + /** + * required string name = 1; + */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getName() { + /** + * required string name = 1; + */ + public java.lang.String getName() { java.lang.Object ref = name_; - if (ref instanceof String) { - return (String) 
ref; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { name_ = s; } return s; } } - private com.google.protobuf.ByteString getNameBytes() { + /** + * required string name = 1; + */ + public com.google.protobuf.ByteString + getNameBytes() { java.lang.Object ref = name_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + // optional bytes serializedFilter = 2; public static final int SERIALIZEDFILTER_FIELD_NUMBER = 2; private com.google.protobuf.ByteString serializedFilter_; + /** + * optional bytes serializedFilter = 2; + */ public boolean hasSerializedFilter() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional bytes serializedFilter = 2; + */ public com.google.protobuf.ByteString getSerializedFilter() { return serializedFilter_; } - + private void initFields() { name_ = ""; serializedFilter_ = com.google.protobuf.ByteString.EMPTY; @@ -7639,7 +10995,7 @@ public final class HBaseProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasName()) { memoizedIsInitialized = 0; return false; @@ -7647,7 +11003,7 @@ public final class HBaseProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -7659,12 +11015,12 @@ public final class HBaseProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -7678,14 +11034,14 @@ public final class HBaseProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -7695,7 +11051,7 @@ public final class HBaseProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter) obj; - + boolean result = true; result = result && (hasName() == other.hasName()); if (hasName()) { @@ -7711,9 +11067,13 @@ public final class HBaseProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasName()) { @@ -7725,89 +11085,79 @@ public final class HBaseProtos { hash = (53 * hash) + getSerializedFilter().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder 
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code Filter} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder { @@ -7815,18 +11165,21 @@ public final class HBaseProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_Filter_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_Filter_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_Filter_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -7837,7 +11190,7 @@ public final class HBaseProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); name_ = ""; @@ -7846,20 +11199,20 @@ public final class HBaseProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_Filter_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter build() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter result = buildPartial(); if (!result.isInitialized()) { @@ -7867,17 +11220,7 @@ public final class HBaseProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter buildPartial() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter(this); int from_bitField0_ = bitField0_; @@ -7894,7 +11237,7 @@ public final class HBaseProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter)other); @@ -7903,11 +11246,13 @@ public final class HBaseProtos { return this; } } - + public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter other) { if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance()) return this; if (other.hasName()) { - setName(other.getName()); + bitField0_ |= 0x00000001; + name_ = other.name_; + onChanged(); } if (other.hasSerializedFilter()) { setSerializedFilter(other.getSerializedFilter()); @@ -7915,7 +11260,7 @@ public final class HBaseProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasName()) { @@ -7923,62 +11268,69 @@ public final class HBaseProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - name_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - serializedFilter_ = input.readBytes(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required string name = 1; private java.lang.Object name_ = ""; + /** + * required string name = 1; + */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getName() { + /** + * required string name = 1; + */ + public java.lang.String getName() { java.lang.Object ref = name_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); name_ = s; return s; } else { - return (String) ref; + return (java.lang.String) ref; + } + } + /** + * required string name = 1; + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; } } - public Builder setName(String value) { + /** + * required string name = 1; + */ + public Builder setName( + java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ -7987,26 +11339,46 @@ public final class HBaseProtos { onChanged(); return this; } + /** + * required string name = 1; + */ public Builder clearName() { bitField0_ = (bitField0_ & ~0x00000001); name_ = getDefaultInstance().getName(); onChanged(); return this; } - void setName(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; + /** + * required string name = 1; + */ + public Builder 
setNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; name_ = value; onChanged(); + return this; } - + // optional bytes serializedFilter = 2; private com.google.protobuf.ByteString serializedFilter_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes serializedFilter = 2; + */ public boolean hasSerializedFilter() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional bytes serializedFilter = 2; + */ public com.google.protobuf.ByteString getSerializedFilter() { return serializedFilter_; } + /** + * optional bytes serializedFilter = 2; + */ public Builder setSerializedFilter(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -8016,140 +11388,319 @@ public final class HBaseProtos { onChanged(); return this; } + /** + * optional bytes serializedFilter = 2; + */ public Builder clearSerializedFilter() { bitField0_ = (bitField0_ & ~0x00000002); serializedFilter_ = getDefaultInstance().getSerializedFilter(); onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:Filter) } - + static { defaultInstance = new Filter(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:Filter) } - + public interface KeyValueOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required bytes row = 1; + /** + * required bytes row = 1; + */ boolean hasRow(); + /** + * required bytes row = 1; + */ com.google.protobuf.ByteString getRow(); - + // required bytes family = 2; + /** + * required bytes family = 2; + */ boolean hasFamily(); + /** + * required bytes family = 2; + */ com.google.protobuf.ByteString getFamily(); - + // required bytes qualifier = 3; + /** + * required bytes qualifier = 3; + */ boolean hasQualifier(); + /** + * required bytes qualifier = 3; + */ com.google.protobuf.ByteString getQualifier(); - + // optional uint64 timestamp = 4; + /** + * optional uint64 timestamp = 4; + */ boolean hasTimestamp(); + /** + * optional uint64 timestamp = 4; + */ long getTimestamp(); - + // optional .CellType keyType = 5; + /** + * optional .CellType keyType = 5; + */ boolean hasKeyType(); + /** + * optional .CellType keyType = 5; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CellType getKeyType(); - + // optional bytes value = 6; + /** + * optional bytes value = 6; + */ boolean hasValue(); + /** + * optional bytes value = 6; + */ com.google.protobuf.ByteString getValue(); } + /** + * Protobuf type {@code KeyValue} + * + *
<pre>
+   **
+   * Protocol buffer version of KeyValue.
+   * It doesn't have those transient parameters
+   * </pre>
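The Filter hunk above shows the pattern every regenerated message in this patch follows: the static parseFrom overloads stop going through newBuilder().mergeFrom(...).buildParsed() and instead delegate to the new PARSER singleton from protobuf 2.5. A minimal caller-side sketch of what stays equivalent, assuming protobuf-java 2.5.x and the regenerated HBaseProtos on the classpath (the filter class name is just example data):

    import com.google.protobuf.InvalidProtocolBufferException;
    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter;

    public class FilterParseSketch {
      public static void main(String[] args) throws InvalidProtocolBufferException {
        // Serialize a Filter with its required name field set.
        byte[] bytes = Filter.newBuilder()
            .setName("org.apache.hadoop.hbase.filter.PrefixFilter")
            .build()
            .toByteArray();

        // After this patch both calls take the same code path:
        Filter viaStatic = Filter.parseFrom(bytes);        // now a thin wrapper
        Filter viaParser = Filter.PARSER.parseFrom(bytes); // protobuf 2.5 Parser API

        System.out.println(viaStatic.equals(viaParser));   // true
      }
    }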
+ */ public static final class KeyValue extends com.google.protobuf.GeneratedMessage implements KeyValueOrBuilder { // Use KeyValue.newBuilder() to construct. - private KeyValue(Builder builder) { + private KeyValue(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private KeyValue(boolean noInit) {} - + private KeyValue(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final KeyValue defaultInstance; public static KeyValue getDefaultInstance() { return defaultInstance; } - + public KeyValue getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private KeyValue( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + row_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + family_ = input.readBytes(); + break; + } + case 26: { + bitField0_ |= 0x00000004; + qualifier_ = input.readBytes(); + break; + } + case 32: { + bitField0_ |= 0x00000008; + timestamp_ = input.readUInt64(); + break; + } + case 40: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CellType value = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CellType.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(5, rawValue); + } else { + bitField0_ |= 0x00000010; + keyType_ = value; + } + break; + } + case 50: { + bitField0_ |= 0x00000020; + value_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_KeyValue_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_KeyValue_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_KeyValue_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public KeyValue parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new KeyValue(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required bytes row = 1; public static final int ROW_FIELD_NUMBER = 1; private com.google.protobuf.ByteString row_; + /** + * required bytes row = 1; + */ public boolean hasRow() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes row = 1; + */ public com.google.protobuf.ByteString getRow() { return row_; } - + // required bytes family = 2; public static final int FAMILY_FIELD_NUMBER = 2; private com.google.protobuf.ByteString family_; + /** + * required bytes family = 2; + */ public boolean hasFamily() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required bytes family = 2; + */ public com.google.protobuf.ByteString getFamily() { return family_; } - + // required bytes qualifier = 3; public static final int QUALIFIER_FIELD_NUMBER = 3; private com.google.protobuf.ByteString qualifier_; + /** + * required bytes qualifier = 3; + */ public boolean hasQualifier() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * required bytes qualifier = 3; + */ public com.google.protobuf.ByteString getQualifier() { return qualifier_; } - + // optional uint64 timestamp = 4; public static final int TIMESTAMP_FIELD_NUMBER = 4; private long timestamp_; + /** + * optional uint64 timestamp = 4; + */ public boolean hasTimestamp() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * optional uint64 timestamp = 4; + */ public long getTimestamp() { return timestamp_; } - + // optional .CellType keyType = 5; public static final int KEYTYPE_FIELD_NUMBER = 5; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CellType keyType_; + /** + * optional .CellType keyType = 5; + */ public boolean hasKeyType() { return ((bitField0_ & 0x00000010) == 0x00000010); } + /** + * optional .CellType keyType = 5; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CellType getKeyType() { return keyType_; } - + // optional bytes value = 6; public static final int VALUE_FIELD_NUMBER = 6; private com.google.protobuf.ByteString value_; + /** + * optional bytes value = 6; + */ public boolean hasValue() { return ((bitField0_ & 0x00000020) == 0x00000020); } + /** + * optional bytes value = 6; + */ public com.google.protobuf.ByteString getValue() { return value_; } - + private void initFields() { row_ = com.google.protobuf.ByteString.EMPTY; family_ = com.google.protobuf.ByteString.EMPTY; @@ -8162,7 +11713,7 @@ public final class HBaseProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasRow()) { memoizedIsInitialized = 0; return false; @@ -8178,7 +11729,7 @@ public final class HBaseProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -8202,12 +11753,12 @@ public final class HBaseProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -8237,14 +11788,14 @@ public final class HBaseProtos { memoizedSerializedSize = size; return 
size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -8254,7 +11805,7 @@ public final class HBaseProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue) obj; - + boolean result = true; result = result && (hasRow() == other.hasRow()); if (hasRow()) { @@ -8290,9 +11841,13 @@ public final class HBaseProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasRow()) { @@ -8320,89 +11875,85 @@ public final class HBaseProtos { hash = (53 * hash) + getValue().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder 
builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code KeyValue} + * + *
<pre>
+     **
+     * Protocol buffer version of KeyValue.
+     * It doesn't have those transient parameters
+     * </pre>
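Builder.mergeFrom(CodedInputStream, ExtensionRegistryLite) in each regenerated Builder (see the Filter Builder above and the KeyValue Builder below) no longer hand-rolls a tag-switch loop; it calls PARSER.parsePartialFrom, merges the result back into the builder, and rethrows InvalidProtocolBufferException after salvaging the unfinished message. A hedged round-trip sketch of that merge path, again assuming protobuf-java 2.5.x:

    import com.google.protobuf.ByteString;
    import com.google.protobuf.CodedInputStream;
    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue;

    public class KeyValueMergeSketch {
      public static void main(String[] args) throws Exception {
        KeyValue src = KeyValue.newBuilder()
            .setRow(ByteString.copyFromUtf8("row-1"))   // required
            .setFamily(ByteString.copyFromUtf8("cf"))   // required
            .setQualifier(ByteString.copyFromUtf8("q")) // required
            .setTimestamp(42L)
            .build();

        // mergeFrom now routes through PARSER.parsePartialFrom internally.
        KeyValue.Builder dst = KeyValue.newBuilder();
        dst.mergeFrom(CodedInputStream.newInstance(src.toByteArray()));

        System.out.println(dst.getTimestamp()); // 42
      }
    }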
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValueOrBuilder { @@ -8410,18 +11961,21 @@ public final class HBaseProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_KeyValue_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_KeyValue_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_KeyValue_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -8432,7 +11986,7 @@ public final class HBaseProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); row_ = com.google.protobuf.ByteString.EMPTY; @@ -8449,20 +12003,20 @@ public final class HBaseProtos { bitField0_ = (bitField0_ & ~0x00000020); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_KeyValue_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue build() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue result = buildPartial(); if (!result.isInitialized()) { @@ -8470,17 +12024,7 @@ public final class HBaseProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue buildPartial() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue(this); int from_bitField0_ = bitField0_; @@ -8513,7 +12057,7 @@ public final class HBaseProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue)other); @@ -8522,7 +12066,7 @@ public final class HBaseProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue other) { if (other == 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.getDefaultInstance()) return this; if (other.hasRow()) { @@ -8546,7 +12090,7 @@ public final class HBaseProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasRow()) { @@ -8562,80 +12106,43 @@ public final class HBaseProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - row_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - family_ = input.readBytes(); - break; - } - case 26: { - bitField0_ |= 0x00000004; - qualifier_ = input.readBytes(); - break; - } - case 32: { - bitField0_ |= 0x00000008; - timestamp_ = input.readUInt64(); - break; - } - case 40: { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CellType value = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CellType.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(5, rawValue); - } else { - bitField0_ |= 0x00000010; - keyType_ = value; - } - break; - } - case 50: { - bitField0_ |= 0x00000020; - value_ = input.readBytes(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required bytes row = 1; private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes row = 1; + */ public boolean hasRow() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes row = 1; + */ public com.google.protobuf.ByteString getRow() { return row_; } + /** + * required bytes row = 1; + */ public Builder setRow(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -8645,21 +12152,33 @@ public final class HBaseProtos { onChanged(); return this; } + /** + * required bytes row = 1; + */ public Builder clearRow() { bitField0_ = (bitField0_ & ~0x00000001); row_ = getDefaultInstance().getRow(); onChanged(); return this; } - + // required bytes family = 2; private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes family = 2; + */ public boolean hasFamily() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required bytes family = 2; + */ public com.google.protobuf.ByteString getFamily() { return family_; } + /** + * required bytes family = 2; + */ public Builder setFamily(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -8669,21 
+12188,33 @@ public final class HBaseProtos { onChanged(); return this; } + /** + * required bytes family = 2; + */ public Builder clearFamily() { bitField0_ = (bitField0_ & ~0x00000002); family_ = getDefaultInstance().getFamily(); onChanged(); return this; } - + // required bytes qualifier = 3; private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes qualifier = 3; + */ public boolean hasQualifier() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * required bytes qualifier = 3; + */ public com.google.protobuf.ByteString getQualifier() { return qualifier_; } + /** + * required bytes qualifier = 3; + */ public Builder setQualifier(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -8693,42 +12224,66 @@ public final class HBaseProtos { onChanged(); return this; } + /** + * required bytes qualifier = 3; + */ public Builder clearQualifier() { bitField0_ = (bitField0_ & ~0x00000004); qualifier_ = getDefaultInstance().getQualifier(); onChanged(); return this; } - + // optional uint64 timestamp = 4; private long timestamp_ ; + /** + * optional uint64 timestamp = 4; + */ public boolean hasTimestamp() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * optional uint64 timestamp = 4; + */ public long getTimestamp() { return timestamp_; } + /** + * optional uint64 timestamp = 4; + */ public Builder setTimestamp(long value) { bitField0_ |= 0x00000008; timestamp_ = value; onChanged(); return this; } + /** + * optional uint64 timestamp = 4; + */ public Builder clearTimestamp() { bitField0_ = (bitField0_ & ~0x00000008); timestamp_ = 0L; onChanged(); return this; } - + // optional .CellType keyType = 5; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CellType keyType_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CellType.MINIMUM; + /** + * optional .CellType keyType = 5; + */ public boolean hasKeyType() { return ((bitField0_ & 0x00000010) == 0x00000010); } + /** + * optional .CellType keyType = 5; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CellType getKeyType() { return keyType_; } + /** + * optional .CellType keyType = 5; + */ public Builder setKeyType(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CellType value) { if (value == null) { throw new NullPointerException(); @@ -8738,21 +12293,33 @@ public final class HBaseProtos { onChanged(); return this; } + /** + * optional .CellType keyType = 5; + */ public Builder clearKeyType() { bitField0_ = (bitField0_ & ~0x00000010); keyType_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CellType.MINIMUM; onChanged(); return this; } - + // optional bytes value = 6; private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes value = 6; + */ public boolean hasValue() { return ((bitField0_ & 0x00000020) == 0x00000020); } + /** + * optional bytes value = 6; + */ public com.google.protobuf.ByteString getValue() { return value_; } + /** + * optional bytes value = 6; + */ public Builder setValue(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -8762,120 +12329,251 @@ public final class HBaseProtos { onChanged(); return this; } + /** + * optional bytes value = 6; + */ public Builder clearValue() { bitField0_ = (bitField0_ & ~0x00000020); value_ = getDefaultInstance().getValue(); onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:KeyValue) } - + static { 
defaultInstance = new KeyValue(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:KeyValue) } - + public interface ServerNameOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required string hostName = 1; + /** + * required string hostName = 1; + */ boolean hasHostName(); - String getHostName(); - + /** + * required string hostName = 1; + */ + java.lang.String getHostName(); + /** + * required string hostName = 1; + */ + com.google.protobuf.ByteString + getHostNameBytes(); + // optional uint32 port = 2; + /** + * optional uint32 port = 2; + */ boolean hasPort(); + /** + * optional uint32 port = 2; + */ int getPort(); - + // optional uint64 startCode = 3; + /** + * optional uint64 startCode = 3; + */ boolean hasStartCode(); + /** + * optional uint64 startCode = 3; + */ long getStartCode(); } + /** + * Protobuf type {@code ServerName} + * + *
<pre>
+   **
+   * Protocol buffer version of ServerName
+   * </pre>
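For string fields the 2.5 generator also makes the ByteString accessor public and adds a matching bytes setter: getHostNameBytes and setHostNameBytes appear in the ServerName hunks below, just as getNameBytes/setNameBytes did for Filter. That lets callers shuttle a host name between messages without a UTF-8 decode/encode round trip. A small sketch (host, port, and start code values are made up):

    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName;

    public class ServerNameBytesSketch {
      public static void main(String[] args) {
        ServerName a = ServerName.newBuilder()
            .setHostName("rs1.example.com") // required field
            .setPort(60020)
            .setStartCode(1L)
            .build();

        // Copy the field as raw bytes; no String is materialized here.
        ByteString raw = a.getHostNameBytes();

        ServerName b = ServerName.newBuilder()
            .setHostNameBytes(raw) // new in the regenerated API
            .build();

        System.out.println(b.getHostName()); // decoded lazily on first access
      }
    }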
+ */ public static final class ServerName extends com.google.protobuf.GeneratedMessage implements ServerNameOrBuilder { // Use ServerName.newBuilder() to construct. - private ServerName(Builder builder) { + private ServerName(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private ServerName(boolean noInit) {} - + private ServerName(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final ServerName defaultInstance; public static ServerName getDefaultInstance() { return defaultInstance; } - + public ServerName getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ServerName( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + hostName_ = input.readBytes(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + port_ = input.readUInt32(); + break; + } + case 24: { + bitField0_ |= 0x00000004; + startCode_ = input.readUInt64(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ServerName_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ServerName_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ServerName_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public ServerName parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ServerName(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required string hostName = 1; public static final int HOSTNAME_FIELD_NUMBER = 1; private java.lang.Object hostName_; + /** + * required string hostName = 1; + */ public boolean hasHostName() { return ((bitField0_ 
& 0x00000001) == 0x00000001); } - public String getHostName() { + /** + * required string hostName = 1; + */ + public java.lang.String getHostName() { java.lang.Object ref = hostName_; - if (ref instanceof String) { - return (String) ref; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { hostName_ = s; } return s; } } - private com.google.protobuf.ByteString getHostNameBytes() { + /** + * required string hostName = 1; + */ + public com.google.protobuf.ByteString + getHostNameBytes() { java.lang.Object ref = hostName_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); hostName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + // optional uint32 port = 2; public static final int PORT_FIELD_NUMBER = 2; private int port_; + /** + * optional uint32 port = 2; + */ public boolean hasPort() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional uint32 port = 2; + */ public int getPort() { return port_; } - + // optional uint64 startCode = 3; public static final int STARTCODE_FIELD_NUMBER = 3; private long startCode_; + /** + * optional uint64 startCode = 3; + */ public boolean hasStartCode() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional uint64 startCode = 3; + */ public long getStartCode() { return startCode_; } - + private void initFields() { hostName_ = ""; port_ = 0; @@ -8885,7 +12583,7 @@ public final class HBaseProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasHostName()) { memoizedIsInitialized = 0; return false; @@ -8893,7 +12591,7 @@ public final class HBaseProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -8908,12 +12606,12 @@ public final class HBaseProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -8931,14 +12629,14 @@ public final class HBaseProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -8948,7 +12646,7 @@ public final class HBaseProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName) obj; - + boolean result = true; result = result && (hasHostName() == other.hasHostName()); if (hasHostName()) { @@ -8969,9 +12667,13 @@ public final class HBaseProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int 
hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasHostName()) { @@ -8987,89 +12689,84 @@ public final class HBaseProtos { hash = (53 * hash) + hashLong(getStartCode()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return 
PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code ServerName} + * + *
<pre>
+     **
+     * Protocol buffer version of ServerName
+     * </pre>
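One behavioural detail worth noting: the removed builder-based parseDelimitedFrom returned null when mergeDelimitedFrom reported a clean end of stream, and PARSER.parseDelimitedFrom preserves that contract, so loops that read length-prefixed messages until null keep working. A sketch, assuming protobuf-java 2.5.x:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName;

    public class DelimitedStreamSketch {
      public static void main(String[] args) throws Exception {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        for (int i = 0; i < 3; i++) {
          ServerName.newBuilder()
              .setHostName("host-" + i) // example data
              .build()
              .writeDelimitedTo(out);   // length-prefixed framing
        }

        ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
        ServerName sn;
        // Still returns null at end of stream, as before the patch.
        while ((sn = ServerName.parseDelimitedFrom(in)) != null) {
          System.out.println(sn.getHostName());
        }
      }
    }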
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder { @@ -9077,18 +12774,21 @@ public final class HBaseProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ServerName_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ServerName_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ServerName_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -9099,7 +12799,7 @@ public final class HBaseProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); hostName_ = ""; @@ -9110,20 +12810,20 @@ public final class HBaseProtos { bitField0_ = (bitField0_ & ~0x00000004); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ServerName_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName build() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName result = buildPartial(); if (!result.isInitialized()) { @@ -9131,17 +12831,7 @@ public final class HBaseProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName buildPartial() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName(this); int from_bitField0_ = bitField0_; @@ -9162,7 +12852,7 @@ public final class HBaseProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName)other); @@ -9171,11 +12861,13 @@ public final class HBaseProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName other) { if (other == 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) return this; if (other.hasHostName()) { - setHostName(other.getHostName()); + bitField0_ |= 0x00000001; + hostName_ = other.hostName_; + onChanged(); } if (other.hasPort()) { setPort(other.getPort()); @@ -9186,7 +12878,7 @@ public final class HBaseProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasHostName()) { @@ -9194,67 +12886,69 @@ public final class HBaseProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - hostName_ = input.readBytes(); - break; - } - case 16: { - bitField0_ |= 0x00000002; - port_ = input.readUInt32(); - break; - } - case 24: { - bitField0_ |= 0x00000004; - startCode_ = input.readUInt64(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required string hostName = 1; private java.lang.Object hostName_ = ""; + /** + * required string hostName = 1; + */ public boolean hasHostName() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getHostName() { + /** + * required string hostName = 1; + */ + public java.lang.String getHostName() { java.lang.Object ref = hostName_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); hostName_ = s; return s; } else { - return (String) ref; + return (java.lang.String) ref; + } + } + /** + * required string hostName = 1; + */ + public com.google.protobuf.ByteString + getHostNameBytes() { + java.lang.Object ref = hostName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + hostName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; } } - public Builder setHostName(String value) { + /** + * required string hostName = 1; + */ + public Builder setHostName( + java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ -9263,139 +12957,263 @@ public final class HBaseProtos { onChanged(); return this; } + /** + * required string hostName = 1; + */ public Builder clearHostName() { bitField0_ = (bitField0_ & ~0x00000001); hostName_ = getDefaultInstance().getHostName(); onChanged(); return this; } - void setHostName(com.google.protobuf.ByteString value) { - bitField0_ |= 
0x00000001; + /** + * required string hostName = 1; + */ + public Builder setHostNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; hostName_ = value; onChanged(); + return this; } - + // optional uint32 port = 2; private int port_ ; + /** + * optional uint32 port = 2; + */ public boolean hasPort() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional uint32 port = 2; + */ public int getPort() { return port_; } + /** + * optional uint32 port = 2; + */ public Builder setPort(int value) { bitField0_ |= 0x00000002; port_ = value; onChanged(); return this; } + /** + * optional uint32 port = 2; + */ public Builder clearPort() { bitField0_ = (bitField0_ & ~0x00000002); port_ = 0; onChanged(); return this; } - + // optional uint64 startCode = 3; private long startCode_ ; + /** + * optional uint64 startCode = 3; + */ public boolean hasStartCode() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional uint64 startCode = 3; + */ public long getStartCode() { return startCode_; } + /** + * optional uint64 startCode = 3; + */ public Builder setStartCode(long value) { bitField0_ |= 0x00000004; startCode_ = value; onChanged(); return this; } + /** + * optional uint64 startCode = 3; + */ public Builder clearStartCode() { bitField0_ = (bitField0_ & ~0x00000004); startCode_ = 0L; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:ServerName) } - + static { defaultInstance = new ServerName(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:ServerName) } - + public interface CoprocessorOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required string name = 1; + /** + * required string name = 1; + */ boolean hasName(); - String getName(); + /** + * required string name = 1; + */ + java.lang.String getName(); + /** + * required string name = 1; + */ + com.google.protobuf.ByteString + getNameBytes(); } + /** + * Protobuf type {@code Coprocessor} + */ public static final class Coprocessor extends com.google.protobuf.GeneratedMessage implements CoprocessorOrBuilder { // Use Coprocessor.newBuilder() to construct. 
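Unknown-field handling moves with the parse path: the new message constructors collect unrecognized tags into a message-held unknownFields set (see the default branch of the tag switch in the Coprocessor constructor below), and writeTo re-emits them, so data from a newer schema survives a round trip through an older message class. A sketch that leans on ServerName and Coprocessor both declaring a string as field 1; the cross-type parse is only for illustration:

    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor;
    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName;

    public class UnknownFieldsSketch {
      public static void main(String[] args) throws Exception {
        byte[] wire = ServerName.newBuilder()
            .setHostName("rs1")
            .setPort(60020)
            .setStartCode(7L)
            .build()
            .toByteArray();

        // Coprocessor only declares field 1; the port and startCode tags
        // are parked in unknownFields rather than dropped.
        Coprocessor c = Coprocessor.parseFrom(wire);

        // writeTo re-emits the unknown fields, so the round trip is lossless.
        ServerName back = ServerName.parseFrom(c.toByteArray());
        System.out.println(back.getPort() + " " + back.getStartCode()); // 60020 7
      }
    }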
-    private Coprocessor(Builder builder) {
+    private Coprocessor(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
       super(builder);
+      this.unknownFields = builder.getUnknownFields();
     }
-    private Coprocessor(boolean noInit) {}
-
+    private Coprocessor(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
     private static final Coprocessor defaultInstance;
     public static Coprocessor getDefaultInstance() {
       return defaultInstance;
     }
-
+
     public Coprocessor getDefaultInstanceForType() {
       return defaultInstance;
     }
-
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private Coprocessor(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 10: {
+              bitField0_ |= 0x00000001;
+              name_ = input.readBytes();
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
       return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_Coprocessor_descriptor;
     }
-
+
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_Coprocessor_fieldAccessorTable;
+      return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_Coprocessor_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<Coprocessor> PARSER =
+        new com.google.protobuf.AbstractParser<Coprocessor>() {
+      public Coprocessor parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new Coprocessor(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<Coprocessor> getParserForType() {
+      return PARSER;
     }
-
+
     private int bitField0_;
     // required string name = 1;
     public static final int NAME_FIELD_NUMBER = 1;
     private java.lang.Object name_;
+    /**
+     * required string name = 1;
+     */
     public boolean hasName() {
       return ((bitField0_ & 0x00000001) == 0x00000001);
     }
-    public String getName() {
+    /**
+     * required string name = 1;
+     */
+    public java.lang.String getName() {
       java.lang.Object ref = name_;
-      if (ref instanceof String) {
-        return (String) ref;
+      if (ref instanceof java.lang.String) {
+        return (java.lang.String) ref;
       } else {
         com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { name_ = s; } return s; } } - private com.google.protobuf.ByteString getNameBytes() { + /** + * required string name = 1; + */ + public com.google.protobuf.ByteString + getNameBytes() { java.lang.Object ref = name_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + private void initFields() { name_ = ""; } @@ -9403,7 +13221,7 @@ public final class HBaseProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasName()) { memoizedIsInitialized = 0; return false; @@ -9411,7 +13229,7 @@ public final class HBaseProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -9420,12 +13238,12 @@ public final class HBaseProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -9435,14 +13253,14 @@ public final class HBaseProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -9452,7 +13270,7 @@ public final class HBaseProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor) obj; - + boolean result = true; result = result && (hasName() == other.hasName()); if (hasName()) { @@ -9463,9 +13281,13 @@ public final class HBaseProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasName()) { @@ -9473,89 +13295,79 @@ public final class HBaseProtos { hash = (53 * hash) + getName().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code Coprocessor} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder { @@ -9563,18 +13375,21 @@ public final class HBaseProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_Coprocessor_descriptor; } - + protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_Coprocessor_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_Coprocessor_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -9585,27 +13400,27 @@ public final class HBaseProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); name_ = ""; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_Coprocessor_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor build() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor result = buildPartial(); if (!result.isInitialized()) { @@ -9613,17 +13428,7 @@ public final class HBaseProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor buildPartial() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor(this); int from_bitField0_ = bitField0_; @@ -9636,7 +13441,7 @@ public final class HBaseProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor)other); @@ -9645,16 +13450,18 @@ public final class HBaseProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor other) { if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.getDefaultInstance()) return this; if (other.hasName()) { - setName(other.getName()); + bitField0_ |= 0x00000001; + name_ = other.name_; + onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasName()) { @@ -9662,57 +13469,69 @@ public final class HBaseProtos { } return true; } - + 
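Worth noting in the Builder hunks above: mergeFrom(Coprocessor other) no longer routes through setName(other.getName()); it copies the raw name_ reference and sets the bit field itself. Since name_ holds either a ByteString (straight off the wire) or a cached String, the direct copy avoids forcing a UTF-8 decode during a merge. A sketch of the lazy caching both sides rely on, with LazyName as a hypothetical stand-in for the generated field:

    import com.google.protobuf.ByteString;

    final class LazyName {
      // Hypothetical stand-in for the generated name_ field: an Object that
      // holds either the wire bytes or the decoded, cached String.
      private Object ref = ByteString.copyFromUtf8("example");

      String get() {
        if (ref instanceof String) {
          return (String) ref;                        // already decoded earlier
        }
        String s = ((ByteString) ref).toStringUtf8(); // first access decodes
        ref = s;                                      // cache for later calls
        return s;
      }
    }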
public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - name_ = input.readBytes(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required string name = 1; private java.lang.Object name_ = ""; + /** + * required string name = 1; + */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getName() { + /** + * required string name = 1; + */ + public java.lang.String getName() { java.lang.Object ref = name_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); name_ = s; return s; } else { - return (String) ref; + return (java.lang.String) ref; + } + } + /** + * required string name = 1; + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; } } - public Builder setName(String value) { + /** + * required string name = 1; + */ + public Builder setName( + java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ -9721,133 +13540,260 @@ public final class HBaseProtos { onChanged(); return this; } + /** + * required string name = 1; + */ public Builder clearName() { bitField0_ = (bitField0_ & ~0x00000001); name_ = getDefaultInstance().getName(); onChanged(); return this; } - void setName(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; + /** + * required string name = 1; + */ + public Builder setNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; name_ = value; onChanged(); + return this; } - + // @@protoc_insertion_point(builder_scope:Coprocessor) } - + static { defaultInstance = new Coprocessor(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:Coprocessor) } - + public interface NameStringPairOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required string name = 1; + /** + * required string name = 1; + */ boolean hasName(); - String getName(); - + /** + * required string name = 1; + */ + java.lang.String getName(); + /** + * required string name = 1; + */ + 
com.google.protobuf.ByteString + getNameBytes(); + // required string value = 2; + /** + * required string value = 2; + */ boolean hasValue(); - String getValue(); + /** + * required string value = 2; + */ + java.lang.String getValue(); + /** + * required string value = 2; + */ + com.google.protobuf.ByteString + getValueBytes(); } + /** + * Protobuf type {@code NameStringPair} + */ public static final class NameStringPair extends com.google.protobuf.GeneratedMessage implements NameStringPairOrBuilder { // Use NameStringPair.newBuilder() to construct. - private NameStringPair(Builder builder) { + private NameStringPair(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private NameStringPair(boolean noInit) {} - + private NameStringPair(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final NameStringPair defaultInstance; public static NameStringPair getDefaultInstance() { return defaultInstance; } - + public NameStringPair getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private NameStringPair( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + name_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + value_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameStringPair_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameStringPair_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameStringPair_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public NameStringPair parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new NameStringPair(input, 
extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required string name = 1; public static final int NAME_FIELD_NUMBER = 1; private java.lang.Object name_; + /** + * required string name = 1; + */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getName() { + /** + * required string name = 1; + */ + public java.lang.String getName() { java.lang.Object ref = name_; - if (ref instanceof String) { - return (String) ref; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { name_ = s; } return s; } } - private com.google.protobuf.ByteString getNameBytes() { + /** + * required string name = 1; + */ + public com.google.protobuf.ByteString + getNameBytes() { java.lang.Object ref = name_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + // required string value = 2; public static final int VALUE_FIELD_NUMBER = 2; private java.lang.Object value_; + /** + * required string value = 2; + */ public boolean hasValue() { return ((bitField0_ & 0x00000002) == 0x00000002); } - public String getValue() { + /** + * required string value = 2; + */ + public java.lang.String getValue() { java.lang.Object ref = value_; - if (ref instanceof String) { - return (String) ref; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { value_ = s; } return s; } } - private com.google.protobuf.ByteString getValueBytes() { + /** + * required string value = 2; + */ + public com.google.protobuf.ByteString + getValueBytes() { java.lang.Object ref = value_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); value_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + private void initFields() { name_ = ""; value_ = ""; @@ -9856,7 +13802,7 @@ public final class HBaseProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasName()) { memoizedIsInitialized = 0; return false; @@ -9868,7 +13814,7 @@ public final class HBaseProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -9880,12 +13826,12 @@ public final class HBaseProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += 
com.google.protobuf.CodedOutputStream @@ -9899,14 +13845,14 @@ public final class HBaseProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -9916,7 +13862,7 @@ public final class HBaseProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair) obj; - + boolean result = true; result = result && (hasName() == other.hasName()); if (hasName()) { @@ -9932,9 +13878,13 @@ public final class HBaseProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasName()) { @@ -9946,89 +13896,79 @@ public final class HBaseProtos { hash = (53 * hash) + getValue().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code NameStringPair} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder { @@ -10036,18 +13976,21 @@ public final class HBaseProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameStringPair_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameStringPair_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameStringPair_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -10058,7 +14001,7 @@ public final class HBaseProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); name_ = ""; @@ -10067,20 +14010,20 @@ public final class HBaseProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameStringPair_descriptor; } - + public 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair build() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair result = buildPartial(); if (!result.isInitialized()) { @@ -10088,17 +14031,7 @@ public final class HBaseProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair buildPartial() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair(this); int from_bitField0_ = bitField0_; @@ -10115,7 +14048,7 @@ public final class HBaseProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair)other); @@ -10124,19 +14057,23 @@ public final class HBaseProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair other) { if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance()) return this; if (other.hasName()) { - setName(other.getName()); + bitField0_ |= 0x00000001; + name_ = other.name_; + onChanged(); } if (other.hasValue()) { - setValue(other.getValue()); + bitField0_ |= 0x00000002; + value_ = other.value_; + onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasName()) { @@ -10148,62 +14085,69 @@ public final class HBaseProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - name_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - value_ = input.readBytes(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private 
int bitField0_; - + // required string name = 1; private java.lang.Object name_ = ""; + /** + * required string name = 1; + */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getName() { + /** + * required string name = 1; + */ + public java.lang.String getName() { java.lang.Object ref = name_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); name_ = s; return s; } else { - return (String) ref; + return (java.lang.String) ref; + } + } + /** + * required string name = 1; + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; } } - public Builder setName(String value) { + /** + * required string name = 1; + */ + public Builder setName( + java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ -10212,34 +14156,72 @@ public final class HBaseProtos { onChanged(); return this; } + /** + * required string name = 1; + */ public Builder clearName() { bitField0_ = (bitField0_ & ~0x00000001); name_ = getDefaultInstance().getName(); onChanged(); return this; } - void setName(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; + /** + * required string name = 1; + */ + public Builder setNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; name_ = value; onChanged(); + return this; } - + // required string value = 2; private java.lang.Object value_ = ""; + /** + * required string value = 2; + */ public boolean hasValue() { return ((bitField0_ & 0x00000002) == 0x00000002); } - public String getValue() { + /** + * required string value = 2; + */ + public java.lang.String getValue() { java.lang.Object ref = value_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); value_ = s; return s; } else { - return (String) ref; + return (java.lang.String) ref; + } + } + /** + * required string value = 2; + */ + public com.google.protobuf.ByteString + getValueBytes() { + java.lang.Object ref = value_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + value_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; } } - public Builder setValue(String value) { + /** + * required string value = 2; + */ + public Builder setValue( + java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ -10248,111 +14230,228 @@ public final class HBaseProtos { onChanged(); return this; } + /** + * required string value = 2; + */ public Builder clearValue() { bitField0_ = (bitField0_ & ~0x00000002); value_ = getDefaultInstance().getValue(); onChanged(); return this; } - void setValue(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000002; + /** + * required string value = 2; + */ + public Builder setValueBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new 
NullPointerException(); + } + bitField0_ |= 0x00000002; value_ = value; onChanged(); + return this; } - + // @@protoc_insertion_point(builder_scope:NameStringPair) } - + static { defaultInstance = new NameStringPair(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:NameStringPair) } - + public interface NameBytesPairOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required string name = 1; + /** + * required string name = 1; + */ boolean hasName(); - String getName(); - + /** + * required string name = 1; + */ + java.lang.String getName(); + /** + * required string name = 1; + */ + com.google.protobuf.ByteString + getNameBytes(); + // optional bytes value = 2; + /** + * optional bytes value = 2; + */ boolean hasValue(); + /** + * optional bytes value = 2; + */ com.google.protobuf.ByteString getValue(); } + /** + * Protobuf type {@code NameBytesPair} + */ public static final class NameBytesPair extends com.google.protobuf.GeneratedMessage implements NameBytesPairOrBuilder { // Use NameBytesPair.newBuilder() to construct. - private NameBytesPair(Builder builder) { + private NameBytesPair(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private NameBytesPair(boolean noInit) {} - + private NameBytesPair(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final NameBytesPair defaultInstance; public static NameBytesPair getDefaultInstance() { return defaultInstance; } - + public NameBytesPair getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private NameBytesPair( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + name_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + value_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameBytesPair_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameBytesPair_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameBytesPair_fieldAccessorTable + .ensureFieldAccessorsInitialized( + 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public NameBytesPair parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new NameBytesPair(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required string name = 1; public static final int NAME_FIELD_NUMBER = 1; private java.lang.Object name_; + /** + * required string name = 1; + */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getName() { + /** + * required string name = 1; + */ + public java.lang.String getName() { java.lang.Object ref = name_; - if (ref instanceof String) { - return (String) ref; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { name_ = s; } return s; } } - private com.google.protobuf.ByteString getNameBytes() { + /** + * required string name = 1; + */ + public com.google.protobuf.ByteString + getNameBytes() { java.lang.Object ref = name_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + // optional bytes value = 2; public static final int VALUE_FIELD_NUMBER = 2; private com.google.protobuf.ByteString value_; + /** + * optional bytes value = 2; + */ public boolean hasValue() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional bytes value = 2; + */ public com.google.protobuf.ByteString getValue() { return value_; } - + private void initFields() { name_ = ""; value_ = com.google.protobuf.ByteString.EMPTY; @@ -10361,7 +14460,7 @@ public final class HBaseProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasName()) { memoizedIsInitialized = 0; return false; @@ -10369,7 +14468,7 @@ public final class HBaseProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -10381,12 +14480,12 @@ public final class HBaseProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -10400,14 +14499,14 @@ public final class HBaseProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final 
java.lang.Object obj) { if (obj == this) { @@ -10417,7 +14516,7 @@ public final class HBaseProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair) obj; - + boolean result = true; result = result && (hasName() == other.hasName()); if (hasName()) { @@ -10433,9 +14532,13 @@ public final class HBaseProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasName()) { @@ -10447,89 +14550,79 @@ public final class HBaseProtos { hash = (53 * hash) + getValue().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, 
extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code NameBytesPair} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder { @@ -10537,18 +14630,21 @@ public final class HBaseProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameBytesPair_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameBytesPair_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameBytesPair_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -10559,7 +14655,7 @@ public final class HBaseProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); name_ = ""; @@ -10568,20 +14664,20 @@ public final class HBaseProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameBytesPair_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair build() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair result = buildPartial(); if (!result.isInitialized()) { @@ -10589,17 +14685,7 @@ public final 
class HBaseProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair buildPartial() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair(this); int from_bitField0_ = bitField0_; @@ -10616,7 +14702,7 @@ public final class HBaseProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair)other); @@ -10625,11 +14711,13 @@ public final class HBaseProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair other) { if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) return this; if (other.hasName()) { - setName(other.getName()); + bitField0_ |= 0x00000001; + name_ = other.name_; + onChanged(); } if (other.hasValue()) { setValue(other.getValue()); @@ -10637,7 +14725,7 @@ public final class HBaseProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasName()) { @@ -10645,62 +14733,69 @@ public final class HBaseProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - name_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - value_ = input.readBytes(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required string name = 1; private java.lang.Object name_ = ""; + /** + * required string name = 1; + */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getName() { + /** + * required string name = 1; + */ + public java.lang.String getName() { java.lang.Object ref = name_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + if (!(ref instanceof 
java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); name_ = s; return s; } else { - return (String) ref; + return (java.lang.String) ref; + } + } + /** + * required string name = 1; + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; } } - public Builder setName(String value) { + /** + * required string name = 1; + */ + public Builder setName( + java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ -10709,26 +14804,46 @@ public final class HBaseProtos { onChanged(); return this; } + /** + * required string name = 1; + */ public Builder clearName() { bitField0_ = (bitField0_ & ~0x00000001); name_ = getDefaultInstance().getName(); onChanged(); return this; } - void setName(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; + /** + * required string name = 1; + */ + public Builder setNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; name_ = value; onChanged(); + return this; } - + // optional bytes value = 2; private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes value = 2; + */ public boolean hasValue() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional bytes value = 2; + */ public com.google.protobuf.ByteString getValue() { return value_; } + /** + * optional bytes value = 2; + */ public Builder setValue(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -10738,84 +14853,183 @@ public final class HBaseProtos { onChanged(); return this; } + /** + * optional bytes value = 2; + */ public Builder clearValue() { bitField0_ = (bitField0_ & ~0x00000002); value_ = getDefaultInstance().getValue(); onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:NameBytesPair) } - + static { defaultInstance = new NameBytesPair(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:NameBytesPair) } - + public interface BytesBytesPairOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required bytes first = 1; + /** + * required bytes first = 1; + */ boolean hasFirst(); + /** + * required bytes first = 1; + */ com.google.protobuf.ByteString getFirst(); - + // required bytes second = 2; + /** + * required bytes second = 2; + */ boolean hasSecond(); + /** + * required bytes second = 2; + */ com.google.protobuf.ByteString getSecond(); } + /** + * Protobuf type {@code BytesBytesPair} + */ public static final class BytesBytesPair extends com.google.protobuf.GeneratedMessage implements BytesBytesPairOrBuilder { // Use BytesBytesPair.newBuilder() to construct. 
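With NameBytesPair finished above, the externally visible behavior to check is that parsing still round-trips: parseDelimitedFrom is now a one-line delegation to PARSER.parseDelimitedFrom, which under protobuf 2.5's AbstractParser should still yield null at a clean end-of-stream, as the removed builder loop did. A small round-trip sketch, using hypothetical field values:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair;

    public class DelimitedRoundTripSketch {
      public static void main(String[] args) throws IOException {
        NameBytesPair pair = NameBytesPair.newBuilder()
            .setName("attr")                           // required string name = 1
            .setValue(ByteString.copyFromUtf8("v"))    // optional bytes value = 2
            .build();

        ByteArrayOutputStream out = new ByteArrayOutputStream();
        pair.writeDelimitedTo(out);                    // varint length prefix + body

        NameBytesPair back = NameBytesPair.parseDelimitedFrom(
            new ByteArrayInputStream(out.toByteArray()));
        System.out.println(back.getName());            // prints "attr"
      }
    }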
- private BytesBytesPair(Builder builder) { + private BytesBytesPair(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private BytesBytesPair(boolean noInit) {} - + private BytesBytesPair(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final BytesBytesPair defaultInstance; public static BytesBytesPair getDefaultInstance() { return defaultInstance; } - + public BytesBytesPair getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private BytesBytesPair( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + first_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + second_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_BytesBytesPair_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_BytesBytesPair_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_BytesBytesPair_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public BytesBytesPair parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new BytesBytesPair(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required bytes first = 1; public static final int FIRST_FIELD_NUMBER = 1; private com.google.protobuf.ByteString first_; + /** + * required bytes first = 1; + */ public boolean hasFirst() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes first = 1; + */ public com.google.protobuf.ByteString getFirst() { return first_; } - + // required bytes second = 2; public static final int 
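BytesBytesPair now carries a private parsing constructor that drives the whole tag loop itself: the done flag replaces the old return-from-switch control flow, unknown fields are captured onto the message, and makeExtensionsImmutable() runs in the finally block. PARSER simply delegates to this constructor, so malformed input fails eagerly at parse time and the exception carries the partial message via setUnfinishedMessage(). A sketch of that behavior, assuming the generated classes (the truncation point is arbitrary):

    import com.google.protobuf.ByteString;
    import com.google.protobuf.InvalidProtocolBufferException;
    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair;

    public class EagerParseDemo {
      public static void main(String[] args) {
        byte[] wire = BytesBytesPair.newBuilder()
            .setFirst(ByteString.copyFromUtf8("row"))
            .setSecond(ByteString.copyFromUtf8("cf:qual"))
            .build().toByteArray();
        // Drop the last byte so the length-delimited 'second' field is short.
        byte[] truncated = java.util.Arrays.copyOf(wire, wire.length - 1);
        try {
          BytesBytesPair.PARSER.parseFrom(truncated);
        } catch (InvalidProtocolBufferException e) {
          // Thrown from inside the BytesBytesPair(CodedInputStream, ...) tag
          // loop; e.getUnfinishedMessage() exposes what was read before the error.
          System.out.println("failed as expected: " + e.getMessage());
        }
      }
    }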
SECOND_FIELD_NUMBER = 2; private com.google.protobuf.ByteString second_; + /** + * required bytes second = 2; + */ public boolean hasSecond() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required bytes second = 2; + */ public com.google.protobuf.ByteString getSecond() { return second_; } - + private void initFields() { first_ = com.google.protobuf.ByteString.EMPTY; second_ = com.google.protobuf.ByteString.EMPTY; @@ -10824,7 +15038,7 @@ public final class HBaseProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasFirst()) { memoizedIsInitialized = 0; return false; @@ -10836,7 +15050,7 @@ public final class HBaseProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -10848,12 +15062,12 @@ public final class HBaseProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -10867,14 +15081,14 @@ public final class HBaseProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -10884,7 +15098,7 @@ public final class HBaseProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair) obj; - + boolean result = true; result = result && (hasFirst() == other.hasFirst()); if (hasFirst()) { @@ -10900,9 +15114,13 @@ public final class HBaseProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasFirst()) { @@ -10914,89 +15132,79 @@ public final class HBaseProtos { hash = (53 * hash) + getSecond().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair 
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code BytesBytesPair} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder { @@ -11004,18 +15212,21 @@ public final class HBaseProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_BytesBytesPair_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_BytesBytesPair_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_BytesBytesPair_fieldAccessorTable + 
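All of the static parseFrom/parseDelimitedFrom entry points above now delegate to PARSER instead of routing through newBuilder().mergeFrom(...).buildParsed(). The removed buildParsed() helper is redundant in this protobuf version: AbstractParser performs the same isInitialized() check and converts a failure into InvalidProtocolBufferException. A small compatibility sketch (class name illustrative):

    import com.google.protobuf.ByteString;
    import com.google.protobuf.InvalidProtocolBufferException;
    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair;

    public class ParseFromDemo {
      public static void main(String[] args) throws InvalidProtocolBufferException {
        BytesBytesPair pair = BytesBytesPair.newBuilder()
            .setFirst(ByteString.copyFromUtf8("a"))
            .setSecond(ByteString.copyFromUtf8("b"))
            .build();
        // Round trip through the parser-backed static method.
        BytesBytesPair copy = BytesBytesPair.parseFrom(pair.toByteArray());
        System.out.println(copy.getFirst().toStringUtf8());   // prints "a"
        try {
          // 'first' and 'second' are required, so an empty payload still fails
          // the initialization check, exactly as buildParsed() used to.
          BytesBytesPair.parseFrom(new byte[0]);
        } catch (InvalidProtocolBufferException expected) {
          System.out.println("uninitialized: " + expected.getMessage());
        }
      }
    }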
.ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -11026,7 +15237,7 @@ public final class HBaseProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); first_ = com.google.protobuf.ByteString.EMPTY; @@ -11035,20 +15246,20 @@ public final class HBaseProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_BytesBytesPair_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair build() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair result = buildPartial(); if (!result.isInitialized()) { @@ -11056,17 +15267,7 @@ public final class HBaseProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair buildPartial() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair(this); int from_bitField0_ = bitField0_; @@ -11083,7 +15284,7 @@ public final class HBaseProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair)other); @@ -11092,7 +15293,7 @@ public final class HBaseProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair other) { if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance()) return this; if (other.hasFirst()) { @@ -11104,7 +15305,7 @@ public final class HBaseProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasFirst()) { @@ -11116,54 +15317,43 @@ public final class HBaseProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - 
com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - first_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - second_ = input.readBytes(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required bytes first = 1; private com.google.protobuf.ByteString first_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes first = 1; + */ public boolean hasFirst() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes first = 1; + */ public com.google.protobuf.ByteString getFirst() { return first_; } + /** + * required bytes first = 1; + */ public Builder setFirst(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -11173,21 +15363,33 @@ public final class HBaseProtos { onChanged(); return this; } + /** + * required bytes first = 1; + */ public Builder clearFirst() { bitField0_ = (bitField0_ & ~0x00000001); first_ = getDefaultInstance().getFirst(); onChanged(); return this; } - + // required bytes second = 2; private com.google.protobuf.ByteString second_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes second = 2; + */ public boolean hasSecond() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required bytes second = 2; + */ public com.google.protobuf.ByteString getSecond() { return second_; } + /** + * required bytes second = 2; + */ public Builder setSecond(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -11197,106 +15399,215 @@ public final class HBaseProtos { onChanged(); return this; } + /** + * required bytes second = 2; + */ public Builder clearSecond() { bitField0_ = (bitField0_ & ~0x00000002); second_ = getDefaultInstance().getSecond(); onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:BytesBytesPair) } - + static { defaultInstance = new BytesBytesPair(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:BytesBytesPair) } - + public interface NameInt64PairOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // optional string name = 1; + /** + * optional string name = 1; + */ boolean hasName(); - String getName(); - + /** + * optional string name = 1; + */ + java.lang.String getName(); + /** + * optional string name = 1; + */ + com.google.protobuf.ByteString + getNameBytes(); + // optional int64 value = 2; + /** + * optional int64 value = 2; + */ boolean hasValue(); + /** + * optional int64 value = 2; + */ long getValue(); } + /** + * Protobuf type {@code NameInt64Pair} + */ public static final class NameInt64Pair extends 
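The rewritten Builder.mergeFrom(CodedInputStream, ...) above no longer hand-rolls the tag switch. It parses a complete message with PARSER.parsePartialFrom and, if parsing fails, still merges whatever was decoded (recovered from the exception with getUnfinishedMessage()) in the finally block before rethrowing. Callers that want lenient, best-effort merging can rely on that, as in this sketch (helper name illustrative):

    import com.google.protobuf.InvalidProtocolBufferException;
    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair;

    public class PartialMergeDemo {
      // Merge as much of 'wire' as parses; fields read before a failure stay set.
      static BytesBytesPair.Builder mergeLeniently(byte[] wire) {
        BytesBytesPair.Builder builder = BytesBytesPair.newBuilder();
        try {
          builder.mergeFrom(wire);  // routes to the CodedInputStream overload above
        } catch (InvalidProtocolBufferException e) {
          // Already-decoded fields were merged from e.getUnfinishedMessage()
          // inside mergeFrom() before the exception propagated here.
        }
        return builder;
      }
    }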
com.google.protobuf.GeneratedMessage implements NameInt64PairOrBuilder { // Use NameInt64Pair.newBuilder() to construct. - private NameInt64Pair(Builder builder) { + private NameInt64Pair(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private NameInt64Pair(boolean noInit) {} - + private NameInt64Pair(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final NameInt64Pair defaultInstance; public static NameInt64Pair getDefaultInstance() { return defaultInstance; } - + public NameInt64Pair getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private NameInt64Pair( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + name_ = input.readBytes(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + value_ = input.readInt64(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameInt64Pair_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameInt64Pair_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameInt64Pair_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder.class); } - + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public NameInt64Pair parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new NameInt64Pair(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + private int bitField0_; // optional string name = 1; public static final int NAME_FIELD_NUMBER = 1; private java.lang.Object name_; + /** + * optional string name = 1; + */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getName() { + /** + * optional string name = 1; + */ + public 
java.lang.String getName() { java.lang.Object ref = name_; - if (ref instanceof String) { - return (String) ref; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { name_ = s; } return s; } } - private com.google.protobuf.ByteString getNameBytes() { + /** + * optional string name = 1; + */ + public com.google.protobuf.ByteString + getNameBytes() { java.lang.Object ref = name_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + // optional int64 value = 2; public static final int VALUE_FIELD_NUMBER = 2; private long value_; + /** + * optional int64 value = 2; + */ public boolean hasValue() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional int64 value = 2; + */ public long getValue() { return value_; } - + private void initFields() { name_ = ""; value_ = 0L; @@ -11305,11 +15616,11 @@ public final class HBaseProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -11321,12 +15632,12 @@ public final class HBaseProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -11340,14 +15651,14 @@ public final class HBaseProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -11357,7 +15668,7 @@ public final class HBaseProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair) obj; - + boolean result = true; result = result && (hasName() == other.hasName()); if (hasName()) { @@ -11373,9 +15684,13 @@ public final class HBaseProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasName()) { @@ -11387,89 +15702,79 @@ public final class HBaseProtos { hash = (53 * hash) + hashLong(getValue()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return 
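NameInt64Pair, like the other messages in this file, also gains the memoizedHashCode cache: hashCode() on an immutable message is pure, so any non-zero result can be stored and reused. The same idiom on a plain immutable class, as a standalone sketch (not HBase code):

    final class MemoizedHash {
      private final String name;
      private final long value;
      private int memoizedHashCode;   // 0 doubles as "not computed yet"

      MemoizedHash(String name, long value) {
        this.name = name;
        this.value = value;
      }

      @Override
      public int hashCode() {
        if (memoizedHashCode != 0) {
          return memoizedHashCode;    // cheap path after the first call
        }
        int hash = 41;
        hash = (19 * hash) + name.hashCode();
        hash = (53 * hash) + (int) (value ^ (value >>> 32));
        memoizedHashCode = hash;      // a result of 0 is simply recomputed
        return hash;
      }
    }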
newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new 
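The delimited variants keep their old contract even though the explicit mergeDelimitedFrom/else-return-null branch is gone: in this protobuf version, Parser.parseDelimitedFrom itself returns null on a clean end of stream. Streaming read loops are therefore unaffected, as in this sketch (stream origin assumed):

    import java.io.IOException;
    import java.io.InputStream;
    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair;

    public class DelimitedReadDemo {
      // Reads length-delimited NameInt64Pair messages until end of stream.
      static long sumValues(InputStream in) throws IOException {
        long sum = 0;
        NameInt64Pair msg;
        while ((msg = NameInt64Pair.parseDelimitedFrom(in)) != null) {
          if (msg.hasValue()) {
            sum += msg.getValue();
          }
        }
        return sum;
      }
    }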
Builder(parent); return builder; } + /** + * Protobuf type {@code NameInt64Pair} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder { @@ -11477,18 +15782,21 @@ public final class HBaseProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameInt64Pair_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameInt64Pair_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameInt64Pair_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -11499,7 +15807,7 @@ public final class HBaseProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); name_ = ""; @@ -11508,20 +15816,20 @@ public final class HBaseProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameInt64Pair_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair build() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair result = buildPartial(); if (!result.isInitialized()) { @@ -11529,17 +15837,7 @@ public final class HBaseProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair buildPartial() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair(this); int from_bitField0_ = bitField0_; @@ -11556,7 +15854,7 @@ public final class HBaseProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair)other); @@ -11565,11 +15863,13 @@ public final class HBaseProtos { 
return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair other) { if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.getDefaultInstance()) return this; if (other.hasName()) { - setName(other.getName()); + bitField0_ |= 0x00000001; + name_ = other.name_; + onChanged(); } if (other.hasValue()) { setValue(other.getValue()); @@ -11577,66 +15877,73 @@ public final class HBaseProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - name_ = input.readBytes(); - break; - } - case 16: { - bitField0_ |= 0x00000002; - value_ = input.readInt64(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // optional string name = 1; private java.lang.Object name_ = ""; + /** + * optional string name = 1; + */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getName() { + /** + * optional string name = 1; + */ + public java.lang.String getName() { java.lang.Object ref = name_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); name_ = s; return s; } else { - return (String) ref; + return (java.lang.String) ref; + } + } + /** + * optional string name = 1; + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; } } - public Builder setName(String value) { + /** + * optional string name = 1; + */ + public Builder setName( + java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ -11645,113 +15952,301 @@ public final class HBaseProtos { onChanged(); return this; } + /** + * optional string name = 1; + */ public Builder clearName() { bitField0_ = (bitField0_ & ~0x00000001); name_ = getDefaultInstance().getName(); onChanged(); return this; } - void setName(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; + /** + * optional string name = 1; + */ + public Builder setNameBytes( + com.google.protobuf.ByteString value) { + 
if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; name_ = value; onChanged(); + return this; } - + // optional int64 value = 2; private long value_ ; + /** + * optional int64 value = 2; + */ public boolean hasValue() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional int64 value = 2; + */ public long getValue() { return value_; } + /** + * optional int64 value = 2; + */ public Builder setValue(long value) { bitField0_ |= 0x00000002; value_ = value; onChanged(); return this; } + /** + * optional int64 value = 2; + */ public Builder clearValue() { bitField0_ = (bitField0_ & ~0x00000002); value_ = 0L; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:NameInt64Pair) } - + static { defaultInstance = new NameInt64Pair(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:NameInt64Pair) } - + public interface SnapshotDescriptionOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required string name = 1; + /** + * required string name = 1; + */ boolean hasName(); - String getName(); - + /** + * required string name = 1; + */ + java.lang.String getName(); + /** + * required string name = 1; + */ + com.google.protobuf.ByteString + getNameBytes(); + // optional string table = 2; + /** + * optional string table = 2; + * + *
+     * <pre>
+     * not needed for delete, but checked for in taking snapshot
+     * </pre>
+ */ boolean hasTable(); - String getTable(); - + /** + * optional string table = 2; + * + *
+     * <pre>
+     * not needed for delete, but checked for in taking snapshot
+     * </pre>
+ */ + java.lang.String getTable(); + /** + * optional string table = 2; + * + *
+     * <pre>
+     * not needed for delete, but checked for in taking snapshot
+     * </pre>
+ */ + com.google.protobuf.ByteString + getTableBytes(); + // optional int64 creationTime = 3 [default = 0]; + /** + * optional int64 creationTime = 3 [default = 0]; + */ boolean hasCreationTime(); + /** + * optional int64 creationTime = 3 [default = 0]; + */ long getCreationTime(); - + // optional .SnapshotDescription.Type type = 4 [default = FLUSH]; + /** + * optional .SnapshotDescription.Type type = 4 [default = FLUSH]; + */ boolean hasType(); + /** + * optional .SnapshotDescription.Type type = 4 [default = FLUSH]; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type getType(); - + // optional int32 version = 5; + /** + * optional int32 version = 5; + */ boolean hasVersion(); + /** + * optional int32 version = 5; + */ int getVersion(); } + /** + * Protobuf type {@code SnapshotDescription} + * + *
+   * <pre>
+   **
+   * Description of the snapshot to take
+   * </pre>
+ */ public static final class SnapshotDescription extends com.google.protobuf.GeneratedMessage implements SnapshotDescriptionOrBuilder { // Use SnapshotDescription.newBuilder() to construct. - private SnapshotDescription(Builder builder) { + private SnapshotDescription(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private SnapshotDescription(boolean noInit) {} - + private SnapshotDescription(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final SnapshotDescription defaultInstance; public static SnapshotDescription getDefaultInstance() { return defaultInstance; } - + public SnapshotDescription getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private SnapshotDescription( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + name_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + table_ = input.readBytes(); + break; + } + case 24: { + bitField0_ |= 0x00000004; + creationTime_ = input.readInt64(); + break; + } + case 32: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type value = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(4, rawValue); + } else { + bitField0_ |= 0x00000008; + type_ = value; + } + break; + } + case 40: { + bitField0_ |= 0x00000010; + version_ = input.readInt32(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_SnapshotDescription_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_SnapshotDescription_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_SnapshotDescription_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + 
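Note the case 32 branch in the SnapshotDescription constructor above: Type.valueOf(rawValue) returns null for an ordinal this build does not recognize, and rather than failing, the raw varint is parked in the unknown-field set so re-serialization preserves it. The companion change just below replaces the hand-maintained VALUES array with values(), which cannot drift when new enum constants are added. A sketch of the observable behavior (the out-of-range ordinal 7 is arbitrary):

    import com.google.protobuf.InvalidProtocolBufferException;
    import com.google.protobuf.UnknownFieldSet;
    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;

    public class UnknownEnumDemo {
      public static void main(String[] args) throws InvalidProtocolBufferException {
        // Hand-craft field 4 (type) as varint 7, which maps to no Type constant.
        UnknownFieldSet unknown = UnknownFieldSet.newBuilder()
            .addField(4, UnknownFieldSet.Field.newBuilder().addVarint(7).build())
            .build();
        byte[] wire = SnapshotDescription.newBuilder()
            .setName("snap")                 // 'name' is the only required field
            .setUnknownFields(unknown)
            .build().toByteArray();

        SnapshotDescription parsed = SnapshotDescription.parseFrom(wire);
        System.out.println(parsed.hasType());                      // false
        System.out.println(parsed.getUnknownFields().hasField(4)); // true: preserved
      }
    }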
public SnapshotDescription parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new SnapshotDescription(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + + /** + * Protobuf enum {@code SnapshotDescription.Type} + */ public enum Type implements com.google.protobuf.ProtocolMessageEnum { + /** + * DISABLED = 0; + */ DISABLED(0, 0), + /** + * FLUSH = 1; + */ FLUSH(1, 1), ; - + + /** + * DISABLED = 0; + */ public static final int DISABLED_VALUE = 0; + /** + * FLUSH = 1; + */ public static final int FLUSH_VALUE = 1; - - + + public final int getNumber() { return value; } - + public static Type valueOf(int value) { switch (value) { case 0: return DISABLED; @@ -11759,7 +16254,7 @@ public final class HBaseProtos { default: return null; } } - + public static com.google.protobuf.Internal.EnumLiteMap internalGetValueMap() { return internalValueMap; @@ -11771,7 +16266,7 @@ public final class HBaseProtos { return Type.valueOf(number); } }; - + public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(index); @@ -11784,11 +16279,9 @@ public final class HBaseProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDescriptor().getEnumTypes().get(0); } - - private static final Type[] VALUES = { - DISABLED, FLUSH, - }; - + + private static final Type[] VALUES = values(); + public static Type valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { @@ -11797,113 +16290,165 @@ public final class HBaseProtos { } return VALUES[desc.getIndex()]; } - + private final int index; private final int value; - + private Type(int index, int value) { this.index = index; this.value = value; } - + // @@protoc_insertion_point(enum_scope:SnapshotDescription.Type) } - + private int bitField0_; // required string name = 1; public static final int NAME_FIELD_NUMBER = 1; private java.lang.Object name_; + /** + * required string name = 1; + */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getName() { + /** + * required string name = 1; + */ + public java.lang.String getName() { java.lang.Object ref = name_; - if (ref instanceof String) { - return (String) ref; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { name_ = s; } return s; } } - private com.google.protobuf.ByteString getNameBytes() { + /** + * required string name = 1; + */ + public com.google.protobuf.ByteString + getNameBytes() { java.lang.Object ref = name_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + // optional string table = 2; public static final int TABLE_FIELD_NUMBER = 2; private java.lang.Object table_; + /** + * optional string table = 2; + * + *
+     * <pre>
+     * not needed for delete, but checked for in taking snapshot
+     * </pre>
+ */ public boolean hasTable() { return ((bitField0_ & 0x00000002) == 0x00000002); } - public String getTable() { + /** + * optional string table = 2; + * + *
+     * <pre>
+     * not needed for delete, but checked for in taking snapshot
+     * </pre>
+ */ + public java.lang.String getTable() { java.lang.Object ref = table_; - if (ref instanceof String) { - return (String) ref; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { table_ = s; } return s; } } - private com.google.protobuf.ByteString getTableBytes() { + /** + * optional string table = 2; + * + *
+     * <pre>
+     * not needed for delete, but checked for in taking snapshot
+     * </pre>
+ */ + public com.google.protobuf.ByteString + getTableBytes() { java.lang.Object ref = table_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); table_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + // optional int64 creationTime = 3 [default = 0]; public static final int CREATIONTIME_FIELD_NUMBER = 3; private long creationTime_; + /** + * optional int64 creationTime = 3 [default = 0]; + */ public boolean hasCreationTime() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional int64 creationTime = 3 [default = 0]; + */ public long getCreationTime() { return creationTime_; } - + // optional .SnapshotDescription.Type type = 4 [default = FLUSH]; public static final int TYPE_FIELD_NUMBER = 4; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type type_; + /** + * optional .SnapshotDescription.Type type = 4 [default = FLUSH]; + */ public boolean hasType() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * optional .SnapshotDescription.Type type = 4 [default = FLUSH]; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type getType() { return type_; } - + // optional int32 version = 5; public static final int VERSION_FIELD_NUMBER = 5; private int version_; + /** + * optional int32 version = 5; + */ public boolean hasVersion() { return ((bitField0_ & 0x00000010) == 0x00000010); } + /** + * optional int32 version = 5; + */ public int getVersion() { return version_; } - + private void initFields() { name_ = ""; table_ = ""; @@ -11915,7 +16460,7 @@ public final class HBaseProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasName()) { memoizedIsInitialized = 0; return false; @@ -11923,7 +16468,7 @@ public final class HBaseProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -11944,12 +16489,12 @@ public final class HBaseProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -11975,14 +16520,14 @@ public final class HBaseProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -11992,7 +16537,7 @@ public final class HBaseProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription) obj; - + boolean result = true; result = result && (hasName() == other.hasName()); if (hasName()) { @@ -12023,9 +16568,13 @@ public final class HBaseProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 
0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasName()) { @@ -12049,89 +16598,84 @@ public final class HBaseProtos { hash = (53 * hash) + getVersion(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, 
extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code SnapshotDescription} + * + *
+     * <pre>
+     **
+     * Description of the snapshot to take
+     * </pre>
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder { @@ -12139,18 +16683,21 @@ public final class HBaseProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_SnapshotDescription_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_SnapshotDescription_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_SnapshotDescription_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -12161,7 +16708,7 @@ public final class HBaseProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); name_ = ""; @@ -12176,20 +16723,20 @@ public final class HBaseProtos { bitField0_ = (bitField0_ & ~0x00000010); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_SnapshotDescription_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription build() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription result = buildPartial(); if (!result.isInitialized()) { @@ -12197,17 +16744,7 @@ public final class HBaseProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription buildPartial() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription(this); int from_bitField0_ = bitField0_; @@ -12236,7 +16773,7 @@ public final class HBaseProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription)other); @@ -12245,14 +16782,18 
@@ public final class HBaseProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription other) { if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance()) return this; if (other.hasName()) { - setName(other.getName()); + bitField0_ |= 0x00000001; + name_ = other.name_; + onChanged(); } if (other.hasTable()) { - setTable(other.getTable()); + bitField0_ |= 0x00000002; + table_ = other.table_; + onChanged(); } if (other.hasCreationTime()) { setCreationTime(other.getCreationTime()); @@ -12266,7 +16807,7 @@ public final class HBaseProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasName()) { @@ -12274,83 +16815,69 @@ public final class HBaseProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - name_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - table_ = input.readBytes(); - break; - } - case 24: { - bitField0_ |= 0x00000004; - creationTime_ = input.readInt64(); - break; - } - case 32: { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type value = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(4, rawValue); - } else { - bitField0_ |= 0x00000008; - type_ = value; - } - break; - } - case 40: { - bitField0_ |= 0x00000010; - version_ = input.readInt32(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required string name = 1; private java.lang.Object name_ = ""; + /** + * required string name = 1; + */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getName() { + /** + * required string name = 1; + */ + public java.lang.String getName() { java.lang.Object ref = name_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); name_ = s; return s; } else { - return (String) ref; + return (java.lang.String) ref; + } + } + /** + * required string name = 1; + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + 
com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; } } - public Builder setName(String value) { + /** + * required string name = 1; + */ + public Builder setName( + java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ -12359,34 +16886,88 @@ public final class HBaseProtos { onChanged(); return this; } + /** + * required string name = 1; + */ public Builder clearName() { bitField0_ = (bitField0_ & ~0x00000001); name_ = getDefaultInstance().getName(); onChanged(); return this; } - void setName(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; + /** + * required string name = 1; + */ + public Builder setNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; name_ = value; onChanged(); + return this; } - + // optional string table = 2; private java.lang.Object table_ = ""; + /** + * optional string table = 2; + * + *
+       * not needed for delete, but checked for in taking snapshot
+       * </pre>
+       */
       public boolean hasTable() {
         return ((bitField0_ & 0x00000002) == 0x00000002);
       }
-      public String getTable() {
+      /**
+       * <code>optional string table = 2;</code>
+       *
+       * <pre>
+       * not needed for delete, but checked for in taking snapshot
+       * </pre>
+       */
+      public java.lang.String getTable() {
         java.lang.Object ref = table_;
-        if (!(ref instanceof String)) {
-          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
+        if (!(ref instanceof java.lang.String)) {
+          java.lang.String s = ((com.google.protobuf.ByteString) ref)
+              .toStringUtf8();
           table_ = s;
           return s;
         } else {
-          return (String) ref;
-        }
-      }
-      public Builder setTable(String value) {
+          return (java.lang.String) ref;
+        }
+      }
+      /**
+       * <code>optional string table = 2;</code>
+       *
+       * <pre>
+       * not needed for delete, but checked for in taking snapshot
+       * </pre>
+       */
+      public com.google.protobuf.ByteString
+          getTableBytes() {
+        java.lang.Object ref = table_;
+        if (ref instanceof String) {
+          com.google.protobuf.ByteString b =
+              com.google.protobuf.ByteString.copyFromUtf8(
+                  (java.lang.String) ref);
+          table_ = b;
+          return b;
+        } else {
+          return (com.google.protobuf.ByteString) ref;
+        }
+      }
+      /**
+       * <code>optional string table = 2;</code>
+       *
+       * <pre>
+       * not needed for delete, but checked for in taking snapshot
+       * </pre>
+       */
+      public Builder setTable(
+          java.lang.String value) {
         if (value == null) {
     throw new NullPointerException();
   }
@@ -12395,47 +16976,87 @@ public final class HBaseProtos {
         onChanged();
         return this;
       }
+      /**
+       * <code>optional string table = 2;</code>
+       *
+       * <pre>
+       * not needed for delete, but checked for in taking snapshot
+       * </pre>
+       */
       public Builder clearTable() {
         bitField0_ = (bitField0_ & ~0x00000002);
         table_ = getDefaultInstance().getTable();
         onChanged();
         return this;
       }
-      void setTable(com.google.protobuf.ByteString value) {
-        bitField0_ |= 0x00000002;
+      /**
+       * <code>optional string table = 2;</code>
+       *
+       * <pre>
+       * not needed for delete, but checked for in taking snapshot
+       * </pre>
+ */ + public Builder setTableBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; table_ = value; onChanged(); + return this; } - + // optional int64 creationTime = 3 [default = 0]; private long creationTime_ ; + /** + * optional int64 creationTime = 3 [default = 0]; + */ public boolean hasCreationTime() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional int64 creationTime = 3 [default = 0]; + */ public long getCreationTime() { return creationTime_; } + /** + * optional int64 creationTime = 3 [default = 0]; + */ public Builder setCreationTime(long value) { bitField0_ |= 0x00000004; creationTime_ = value; onChanged(); return this; } + /** + * optional int64 creationTime = 3 [default = 0]; + */ public Builder clearCreationTime() { bitField0_ = (bitField0_ & ~0x00000004); creationTime_ = 0L; onChanged(); return this; } - + // optional .SnapshotDescription.Type type = 4 [default = FLUSH]; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type type_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type.FLUSH; + /** + * optional .SnapshotDescription.Type type = 4 [default = FLUSH]; + */ public boolean hasType() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * optional .SnapshotDescription.Type type = 4 [default = FLUSH]; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type getType() { return type_; } + /** + * optional .SnapshotDescription.Type type = 4 [default = FLUSH]; + */ public Builder setType(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type value) { if (value == null) { throw new NullPointerException(); @@ -12445,111 +17066,187 @@ public final class HBaseProtos { onChanged(); return this; } + /** + * optional .SnapshotDescription.Type type = 4 [default = FLUSH]; + */ public Builder clearType() { bitField0_ = (bitField0_ & ~0x00000008); type_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type.FLUSH; onChanged(); return this; } - + // optional int32 version = 5; private int version_ ; + /** + * optional int32 version = 5; + */ public boolean hasVersion() { return ((bitField0_ & 0x00000010) == 0x00000010); } + /** + * optional int32 version = 5; + */ public int getVersion() { return version_; } + /** + * optional int32 version = 5; + */ public Builder setVersion(int value) { bitField0_ |= 0x00000010; version_ = value; onChanged(); return this; } + /** + * optional int32 version = 5; + */ public Builder clearVersion() { bitField0_ = (bitField0_ & ~0x00000010); version_ = 0; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:SnapshotDescription) } - + static { defaultInstance = new SnapshotDescription(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:SnapshotDescription) } - + public interface EmptyMsgOrBuilder extends com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code EmptyMsg} + */ public static final class EmptyMsg extends com.google.protobuf.GeneratedMessage implements EmptyMsgOrBuilder { // Use EmptyMsg.newBuilder() to construct. 
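Taken together, the SnapshotDescription changes above show how protobuf 2.5 treats string fields: the old package-private void setName(ByteString)/setTable(ByteString) helpers become public getNameBytes()/setNameBytes() and getTableBytes()/setTableBytes() accessors, and mergeFrom(SnapshotDescription other) now copies the raw field (name_ = other.name_) instead of calling setName(other.getName()), so a value that arrived as UTF-8 bytes is never decoded just to be re-encoded. A minimal stand-alone sketch of that lazy String/ByteString dual storage (the demo class is hypothetical; the idiom is lifted from the generated accessors above):

    import com.google.protobuf.ByteString;

    // Hypothetical demo of the dual-representation idiom used by generated
    // string fields: the field holds whichever form it last produced and
    // converts (then caches) only on demand.
    public final class LazyStringFieldDemo {
      private Object ref = ByteString.copyFromUtf8("snapshot-1"); // as parsed off the wire

      public String get() {
        if (!(ref instanceof String)) {
          String s = ((ByteString) ref).toStringUtf8(); // decode once
          ref = s;                                      // cache the String form
          return s;
        }
        return (String) ref;
      }

      public ByteString getBytes() {
        if (ref instanceof String) {
          ByteString b = ByteString.copyFromUtf8((String) ref);
          ref = b;                                      // cache the ByteString form
          return b;
        }
        return (ByteString) ref;
      }

      public static void main(String[] args) {
        LazyStringFieldDemo f = new LazyStringFieldDemo();
        System.out.println(f.get());      // decodes the bytes once, caches the String
        System.out.println(f.getBytes()); // re-encodes from the cached String, caches the bytes
      }
    }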
- private EmptyMsg(Builder builder) { + private EmptyMsg(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private EmptyMsg(boolean noInit) {} - + private EmptyMsg(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final EmptyMsg defaultInstance; public static EmptyMsg getDefaultInstance() { return defaultInstance; } - + public EmptyMsg getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private EmptyMsg( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_EmptyMsg_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_EmptyMsg_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_EmptyMsg_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public EmptyMsg parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new EmptyMsg(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; 
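EmptyMsg now carries the full protobuf 2.5 message plumbing: a private constructor that reads its own CodedInputStream and retains unrecognized tags in the message's unknownFields, plus a static PARSER that wraps that constructor. For callers, one Parser call replaces the old builder round-trip. A hedged usage sketch (the demo class and the round-tripped bytes are illustrative only):

    import com.google.protobuf.InvalidProtocolBufferException;
    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg;

    // Hypothetical caller-side demo of the new PARSER entry point.
    public final class ParserDemo {
      public static void main(String[] args) {
        byte[] data = EmptyMsg.getDefaultInstance().toByteArray();
        try {
          // One call replaces the old newBuilder().mergeFrom(data).buildParsed() dance.
          EmptyMsg msg = EmptyMsg.PARSER.parseFrom(data);
          System.out.println(msg.isInitialized()); // true
        } catch (InvalidProtocolBufferException e) {
          // New in 2.5: whatever parsed before the failure is still reachable.
          com.google.protobuf.MessageLite partial = e.getUnfinishedMessage();
          System.err.println("parse failed after partial read: " + partial);
        }
      }
    }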
@java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -12559,101 +17256,95 @@ public final class HBaseProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg) obj; - + boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code EmptyMsg} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsgOrBuilder { @@ -12661,18 +17352,21 @@ public final class HBaseProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_EmptyMsg_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_EmptyMsg_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_EmptyMsg_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -12683,25 +17377,25 @@ public final class HBaseProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_EmptyMsg_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg build() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg result = buildPartial(); if (!result.isInitialized()) { @@ -12709,23 +17403,13 @@ public final class HBaseProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg buildPartial() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg(this); onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg)other); @@ -12734,102 +17418,171 @@ public final class HBaseProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg other) { if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - - + // @@protoc_insertion_point(builder_scope:EmptyMsg) } - + static { defaultInstance = new EmptyMsg(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:EmptyMsg) } - + public interface LongMsgOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required int64 longMsg = 1; + /** + * required int64 longMsg = 1; + */ boolean hasLongMsg(); + /** + * required int64 longMsg = 1; + */ long getLongMsg(); } + /** + * Protobuf type {@code LongMsg} + */ public static final class LongMsg extends com.google.protobuf.GeneratedMessage implements LongMsgOrBuilder { // Use LongMsg.newBuilder() to construct. 
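The rewritten Builder.mergeFrom(CodedInputStream, ExtensionRegistryLite) above is worth pausing on: it delegates to PARSER.parsePartialFrom, and even when parsing throws, the finally block merges whatever was decoded back into the builder, since the exception carries the partial message via getUnfinishedMessage(). The identical pattern recurs in the LongMsg and BigDecimalMsg builders below. A caller can lean on that to salvage fields from truncated input (a sketch; the byte-array source is hypothetical):

    import com.google.protobuf.CodedInputStream;
    import com.google.protobuf.InvalidProtocolBufferException;
    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg;

    // Hypothetical: keep whatever fields parsed before the input went bad.
    public final class SalvageDemo {
      public static LongMsg.Builder readLenient(byte[] bytes) throws java.io.IOException {
        LongMsg.Builder builder = LongMsg.newBuilder();
        CodedInputStream in = CodedInputStream.newInstance(bytes);
        try {
          builder.mergeFrom(in); // delegates to PARSER.parsePartialFrom internally
        } catch (InvalidProtocolBufferException e) {
          // The generated mergeFrom already merged the unfinished message in
          // its finally block, so 'builder' retains any fields read so far.
        }
        return builder;
      }
    }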
- private LongMsg(Builder builder) { + private LongMsg(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private LongMsg(boolean noInit) {} - + private LongMsg(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final LongMsg defaultInstance; public static LongMsg getDefaultInstance() { return defaultInstance; } - + public LongMsg getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private LongMsg( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + longMsg_ = input.readInt64(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_LongMsg_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_LongMsg_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_LongMsg_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public LongMsg parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new LongMsg(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required int64 longMsg = 1; public static final int LONGMSG_FIELD_NUMBER = 1; private long longMsg_; + /** + * required int64 longMsg = 1; + */ public boolean hasLongMsg() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required int64 longMsg = 1; + */ public long getLongMsg() { return longMsg_; } - + private void initFields() { longMsg_ = 0L; } @@ -12837,7 +17590,7 @@ public final class HBaseProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasLongMsg()) { 
memoizedIsInitialized = 0; return false; @@ -12845,7 +17598,7 @@ public final class HBaseProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -12854,12 +17607,12 @@ public final class HBaseProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -12869,14 +17622,14 @@ public final class HBaseProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -12886,7 +17639,7 @@ public final class HBaseProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg) obj; - + boolean result = true; result = result && (hasLongMsg() == other.hasLongMsg()); if (hasLongMsg()) { @@ -12897,9 +17650,13 @@ public final class HBaseProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasLongMsg()) { @@ -12907,89 +17664,79 @@ public final class HBaseProtos { hash = (53 * hash) + hashLong(getLongMsg()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) 
throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code LongMsg} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsgOrBuilder { @@ -12997,18 +17744,21 @@ public final class HBaseProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_LongMsg_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_LongMsg_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_LongMsg_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -13019,27 +17769,27 @@ public final class HBaseProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); longMsg_ = 0L; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { 
return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_LongMsg_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg build() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg result = buildPartial(); if (!result.isInitialized()) { @@ -13047,17 +17797,7 @@ public final class HBaseProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg buildPartial() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg(this); int from_bitField0_ = bitField0_; @@ -13070,7 +17810,7 @@ public final class HBaseProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg)other); @@ -13079,7 +17819,7 @@ public final class HBaseProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg other) { if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg.getDefaultInstance()) return this; if (other.hasLongMsg()) { @@ -13088,7 +17828,7 @@ public final class HBaseProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasLongMsg()) { @@ -13096,119 +17836,195 @@ public final class HBaseProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - longMsg_ = input.readInt64(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + 
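LongMsg's static parseDelimitedFrom helpers (rewritten above) keep their old contract: where the builder-based code returned null once mergeDelimitedFrom hit end-of-stream, PARSER.parseDelimitedFrom likewise returns null on a clean EOF, so existing read loops survive the migration unchanged. A sketch of such a loop (the stream source is hypothetical):

    import java.io.IOException;
    import java.io.InputStream;
    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg;

    // Hypothetical reader over a stream of length-delimited LongMsg records.
    public final class DelimitedReadDemo {
      public static long sum(InputStream in) throws IOException {
        long total = 0;
        LongMsg msg;
        // parseDelimitedFrom returns null at clean end-of-stream, exactly as
        // the old mergeDelimitedFrom-based helper did.
        while ((msg = LongMsg.parseDelimitedFrom(in)) != null) {
          total += msg.getLongMsg();
        }
        return total;
      }
    }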
// required int64 longMsg = 1; private long longMsg_ ; + /** + * required int64 longMsg = 1; + */ public boolean hasLongMsg() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required int64 longMsg = 1; + */ public long getLongMsg() { return longMsg_; } + /** + * required int64 longMsg = 1; + */ public Builder setLongMsg(long value) { bitField0_ |= 0x00000001; longMsg_ = value; onChanged(); return this; } + /** + * required int64 longMsg = 1; + */ public Builder clearLongMsg() { bitField0_ = (bitField0_ & ~0x00000001); longMsg_ = 0L; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:LongMsg) } - + static { defaultInstance = new LongMsg(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:LongMsg) } - + public interface BigDecimalMsgOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required bytes bigdecimalMsg = 1; + /** + * required bytes bigdecimalMsg = 1; + */ boolean hasBigdecimalMsg(); + /** + * required bytes bigdecimalMsg = 1; + */ com.google.protobuf.ByteString getBigdecimalMsg(); } + /** + * Protobuf type {@code BigDecimalMsg} + */ public static final class BigDecimalMsg extends com.google.protobuf.GeneratedMessage implements BigDecimalMsgOrBuilder { // Use BigDecimalMsg.newBuilder() to construct. - private BigDecimalMsg(Builder builder) { + private BigDecimalMsg(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private BigDecimalMsg(boolean noInit) {} - + private BigDecimalMsg(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final BigDecimalMsg defaultInstance; public static BigDecimalMsg getDefaultInstance() { return defaultInstance; } - + public BigDecimalMsg getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private BigDecimalMsg( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + bigdecimalMsg_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_BigDecimalMsg_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_BigDecimalMsg_fieldAccessorTable; + return 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_BigDecimalMsg_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public BigDecimalMsg parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new BigDecimalMsg(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required bytes bigdecimalMsg = 1; public static final int BIGDECIMALMSG_FIELD_NUMBER = 1; private com.google.protobuf.ByteString bigdecimalMsg_; + /** + * required bytes bigdecimalMsg = 1; + */ public boolean hasBigdecimalMsg() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes bigdecimalMsg = 1; + */ public com.google.protobuf.ByteString getBigdecimalMsg() { return bigdecimalMsg_; } - + private void initFields() { bigdecimalMsg_ = com.google.protobuf.ByteString.EMPTY; } @@ -13216,7 +18032,7 @@ public final class HBaseProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasBigdecimalMsg()) { memoizedIsInitialized = 0; return false; @@ -13224,7 +18040,7 @@ public final class HBaseProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -13233,12 +18049,12 @@ public final class HBaseProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -13248,14 +18064,14 @@ public final class HBaseProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -13265,7 +18081,7 @@ public final class HBaseProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg) obj; - + boolean result = true; result = result && (hasBigdecimalMsg() == other.hasBigdecimalMsg()); if (hasBigdecimalMsg()) { @@ -13276,9 +18092,13 @@ public final class HBaseProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasBigdecimalMsg()) { @@ -13286,89 +18106,79 @@ public final class HBaseProtos { hash = (53 * hash) + getBigdecimalMsg().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { 
return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code BigDecimalMsg} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsgOrBuilder { @@ -13376,18 +18186,21 @@ public final class HBaseProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_BigDecimalMsg_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_BigDecimalMsg_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_BigDecimalMsg_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -13398,27 +18211,27 @@ public final class HBaseProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); bigdecimalMsg_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_BigDecimalMsg_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg build() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg result = buildPartial(); if (!result.isInitialized()) { @@ -13426,17 +18239,7 @@ public final class HBaseProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg buildPartial() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg(this); int from_bitField0_ = bitField0_; @@ -13449,7 +18252,7 @@ public final class HBaseProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg) { 
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg)other); @@ -13458,7 +18261,7 @@ public final class HBaseProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg other) { if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg.getDefaultInstance()) return this; if (other.hasBigdecimalMsg()) { @@ -13467,7 +18270,7 @@ public final class HBaseProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasBigdecimalMsg()) { @@ -13475,49 +18278,43 @@ public final class HBaseProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - bigdecimalMsg_ = input.readBytes(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required bytes bigdecimalMsg = 1; private com.google.protobuf.ByteString bigdecimalMsg_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes bigdecimalMsg = 1; + */ public boolean hasBigdecimalMsg() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes bigdecimalMsg = 1; + */ public com.google.protobuf.ByteString getBigdecimalMsg() { return bigdecimalMsg_; } + /** + * required bytes bigdecimalMsg = 1; + */ public Builder setBigdecimalMsg(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -13527,24 +18324,27 @@ public final class HBaseProtos { onChanged(); return this; } + /** + * required bytes bigdecimalMsg = 1; + */ public Builder clearBigdecimalMsg() { bitField0_ = (bitField0_ & ~0x00000001); bigdecimalMsg_ = getDefaultInstance().getBigdecimalMsg(); onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:BigDecimalMsg) } - + static { defaultInstance = new BigDecimalMsg(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:BigDecimalMsg) } - + private static com.google.protobuf.Descriptors.Descriptor internal_static_Cell_descriptor; private static @@ -13645,7 +18445,7 @@ public final class HBaseProtos { private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_BigDecimalMsg_fieldAccessorTable; - + public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; @@ -13725,161 +18525,121 @@ public final class HBaseProtos { internal_static_Cell_fieldAccessorTable = new 
com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_Cell_descriptor, - new java.lang.String[] { "Row", "Family", "Qualifier", "Timestamp", "CellType", "Value", }, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell.class, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Cell.Builder.class); + new java.lang.String[] { "Row", "Family", "Qualifier", "Timestamp", "CellType", "Value", }); internal_static_TableSchema_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_TableSchema_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_TableSchema_descriptor, - new java.lang.String[] { "Name", "Attributes", "ColumnFamilies", "Configuration", }, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.class, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder.class); + new java.lang.String[] { "Name", "Attributes", "ColumnFamilies", "Configuration", }); internal_static_ColumnFamilySchema_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_ColumnFamilySchema_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ColumnFamilySchema_descriptor, - new java.lang.String[] { "Name", "Attributes", "Configuration", }, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.class, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder.class); + new java.lang.String[] { "Name", "Attributes", "Configuration", }); internal_static_RegionInfo_descriptor = getDescriptor().getMessageTypes().get(3); internal_static_RegionInfo_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_RegionInfo_descriptor, - new java.lang.String[] { "RegionId", "TableName", "StartKey", "EndKey", "Offline", "Split", }, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.class, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder.class); + new java.lang.String[] { "RegionId", "TableName", "StartKey", "EndKey", "Offline", "Split", }); internal_static_RegionSpecifier_descriptor = getDescriptor().getMessageTypes().get(4); internal_static_RegionSpecifier_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_RegionSpecifier_descriptor, - new java.lang.String[] { "Type", "Value", }, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.class, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder.class); + new java.lang.String[] { "Type", "Value", }); internal_static_RegionLoad_descriptor = getDescriptor().getMessageTypes().get(5); internal_static_RegionLoad_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_RegionLoad_descriptor, - new java.lang.String[] { "RegionSpecifier", "Stores", "Storefiles", "StoreUncompressedSizeMB", "StorefileSizeMB", "MemstoreSizeMB", "StorefileIndexSizeMB", "ReadRequestsCount", "WriteRequestsCount", "TotalCompactingKVs", "CurrentCompactedKVs", "RootIndexSizeKB", "TotalStaticIndexSizeKB", "TotalStaticBloomSizeKB", "CompleteSequenceId", }, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.class, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.Builder.class); + new java.lang.String[] { "RegionSpecifier", "Stores", "Storefiles", "StoreUncompressedSizeMB", "StorefileSizeMB", "MemstoreSizeMB", "StorefileIndexSizeMB", 
"ReadRequestsCount", "WriteRequestsCount", "TotalCompactingKVs", "CurrentCompactedKVs", "RootIndexSizeKB", "TotalStaticIndexSizeKB", "TotalStaticBloomSizeKB", "CompleteSequenceId", }); internal_static_ServerLoad_descriptor = getDescriptor().getMessageTypes().get(6); internal_static_ServerLoad_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ServerLoad_descriptor, - new java.lang.String[] { "NumberOfRequests", "TotalNumberOfRequests", "UsedHeapMB", "MaxHeapMB", "RegionLoads", "Coprocessors", "ReportStartTime", "ReportEndTime", "InfoServerPort", }, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.class, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.Builder.class); + new java.lang.String[] { "NumberOfRequests", "TotalNumberOfRequests", "UsedHeapMB", "MaxHeapMB", "RegionLoads", "Coprocessors", "ReportStartTime", "ReportEndTime", "InfoServerPort", }); internal_static_TimeRange_descriptor = getDescriptor().getMessageTypes().get(7); internal_static_TimeRange_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_TimeRange_descriptor, - new java.lang.String[] { "From", "To", }, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.class, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder.class); + new java.lang.String[] { "From", "To", }); internal_static_Filter_descriptor = getDescriptor().getMessageTypes().get(8); internal_static_Filter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_Filter_descriptor, - new java.lang.String[] { "Name", "SerializedFilter", }, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.class, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder.class); + new java.lang.String[] { "Name", "SerializedFilter", }); internal_static_KeyValue_descriptor = getDescriptor().getMessageTypes().get(9); internal_static_KeyValue_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_KeyValue_descriptor, - new java.lang.String[] { "Row", "Family", "Qualifier", "Timestamp", "KeyType", "Value", }, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.class, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder.class); + new java.lang.String[] { "Row", "Family", "Qualifier", "Timestamp", "KeyType", "Value", }); internal_static_ServerName_descriptor = getDescriptor().getMessageTypes().get(10); internal_static_ServerName_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ServerName_descriptor, - new java.lang.String[] { "HostName", "Port", "StartCode", }, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.class, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder.class); + new java.lang.String[] { "HostName", "Port", "StartCode", }); internal_static_Coprocessor_descriptor = getDescriptor().getMessageTypes().get(11); internal_static_Coprocessor_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_Coprocessor_descriptor, - new java.lang.String[] { "Name", }, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.class, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder.class); + new java.lang.String[] { "Name", }); internal_static_NameStringPair_descriptor = getDescriptor().getMessageTypes().get(12); 
internal_static_NameStringPair_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_NameStringPair_descriptor, - new java.lang.String[] { "Name", "Value", }, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.class, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder.class); + new java.lang.String[] { "Name", "Value", }); internal_static_NameBytesPair_descriptor = getDescriptor().getMessageTypes().get(13); internal_static_NameBytesPair_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_NameBytesPair_descriptor, - new java.lang.String[] { "Name", "Value", }, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.class, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder.class); + new java.lang.String[] { "Name", "Value", }); internal_static_BytesBytesPair_descriptor = getDescriptor().getMessageTypes().get(14); internal_static_BytesBytesPair_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_BytesBytesPair_descriptor, - new java.lang.String[] { "First", "Second", }, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.class, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder.class); + new java.lang.String[] { "First", "Second", }); internal_static_NameInt64Pair_descriptor = getDescriptor().getMessageTypes().get(15); internal_static_NameInt64Pair_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_NameInt64Pair_descriptor, - new java.lang.String[] { "Name", "Value", }, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.class, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder.class); + new java.lang.String[] { "Name", "Value", }); internal_static_SnapshotDescription_descriptor = getDescriptor().getMessageTypes().get(16); internal_static_SnapshotDescription_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_SnapshotDescription_descriptor, - new java.lang.String[] { "Name", "Table", "CreationTime", "Type", "Version", }, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.class, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder.class); + new java.lang.String[] { "Name", "Table", "CreationTime", "Type", "Version", }); internal_static_EmptyMsg_descriptor = getDescriptor().getMessageTypes().get(17); internal_static_EmptyMsg_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_EmptyMsg_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg.class, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg.Builder.class); + new java.lang.String[] { }); internal_static_LongMsg_descriptor = getDescriptor().getMessageTypes().get(18); internal_static_LongMsg_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_LongMsg_descriptor, - new java.lang.String[] { "LongMsg", }, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg.class, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg.Builder.class); + new java.lang.String[] { "LongMsg", }); internal_static_BigDecimalMsg_descriptor = getDescriptor().getMessageTypes().get(19); 
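
The rewritten Builder.mergeFrom(CodedInputStream, ExtensionRegistryLite) bodies, seen above for BigDecimalMsg and again below for the HFileProtos builders, all follow one contract: parsing funnels through PARSER.parsePartialFrom, and on failure the InvalidProtocolBufferException carries the partially decoded message via getUnfinishedMessage(), which the builder folds in before rethrowing. A minimal sketch of that failure path, assuming protobuf-java 2.5.0 and the regenerated HFileProtos on the classpath; the truncated byte array is made up for illustration:

import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto;

public final class PartialParseDemo {
  public static void main(String[] args) {
    // Field 1 (mapEntry), length-delimited, claims 127 payload bytes but
    // supplies none, so the parser fails partway through.
    byte[] truncated = { 0x0a, 0x7f };
    try {
      FileInfoProto.PARSER.parseFrom(truncated);
    } catch (InvalidProtocolBufferException e) {
      // setUnfinishedMessage(this) in the generated constructor preserves
      // whatever was decoded before the failure instead of discarding it.
      System.out.println("unfinished message present: "
          + (e.getUnfinishedMessage() != null));
    }
  }
}
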
internal_static_BigDecimalMsg_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_BigDecimalMsg_descriptor, - new java.lang.String[] { "BigdecimalMsg", }, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg.class, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg.Builder.class); + new java.lang.String[] { "BigdecimalMsg", }); return null; } }; @@ -13888,6 +18648,6 @@ public final class HBaseProtos { new com.google.protobuf.Descriptors.FileDescriptor[] { }, assigner); } - + // @@protoc_insertion_point(outer_class_scope) } diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HFileProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HFileProtos.java index d498d29..7f79a2b 100644 --- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HFileProtos.java +++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HFileProtos.java @@ -10,66 +10,173 @@ public final class HFileProtos { } public interface FileInfoProtoOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // repeated .BytesBytesPair mapEntry = 1; + /** + * repeated .BytesBytesPair mapEntry = 1; + */ java.util.List getMapEntryList(); + /** + * repeated .BytesBytesPair mapEntry = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getMapEntry(int index); + /** + * repeated .BytesBytesPair mapEntry = 1; + */ int getMapEntryCount(); + /** + * repeated .BytesBytesPair mapEntry = 1; + */ java.util.List getMapEntryOrBuilderList(); + /** + * repeated .BytesBytesPair mapEntry = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getMapEntryOrBuilder( int index); } + /** + * Protobuf type {@code FileInfoProto} + * + *
+   * Map of name/values
+   * &lt;/pre&gt;
+ */ public static final class FileInfoProto extends com.google.protobuf.GeneratedMessage implements FileInfoProtoOrBuilder { // Use FileInfoProto.newBuilder() to construct. - private FileInfoProto(Builder builder) { + private FileInfoProto(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private FileInfoProto(boolean noInit) {} - + private FileInfoProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final FileInfoProto defaultInstance; public static FileInfoProto getDefaultInstance() { return defaultInstance; } - + public FileInfoProto getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private FileInfoProto( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + mapEntry_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + mapEntry_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.PARSER, extensionRegistry)); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + mapEntry_ = java.util.Collections.unmodifiableList(mapEntry_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HFileProtos.internal_static_FileInfoProto_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HFileProtos.internal_static_FileInfoProto_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.HFileProtos.internal_static_FileInfoProto_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto.class, org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public FileInfoProto parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new FileInfoProto(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + // repeated 
.BytesBytesPair mapEntry = 1; public static final int MAPENTRY_FIELD_NUMBER = 1; private java.util.List mapEntry_; + /** + * repeated .BytesBytesPair mapEntry = 1; + */ public java.util.List getMapEntryList() { return mapEntry_; } + /** + * repeated .BytesBytesPair mapEntry = 1; + */ public java.util.List getMapEntryOrBuilderList() { return mapEntry_; } + /** + * repeated .BytesBytesPair mapEntry = 1; + */ public int getMapEntryCount() { return mapEntry_.size(); } + /** + * repeated .BytesBytesPair mapEntry = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getMapEntry(int index) { return mapEntry_.get(index); } + /** + * repeated .BytesBytesPair mapEntry = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getMapEntryOrBuilder( int index) { return mapEntry_.get(index); } - + private void initFields() { mapEntry_ = java.util.Collections.emptyList(); } @@ -77,7 +184,7 @@ public final class HFileProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + for (int i = 0; i < getMapEntryCount(); i++) { if (!getMapEntry(i).isInitialized()) { memoizedIsInitialized = 0; @@ -87,7 +194,7 @@ public final class HFileProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -96,12 +203,12 @@ public final class HFileProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; for (int i = 0; i < mapEntry_.size(); i++) { size += com.google.protobuf.CodedOutputStream @@ -111,14 +218,14 @@ public final class HFileProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -128,7 +235,7 @@ public final class HFileProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto other = (org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto) obj; - + boolean result = true; result = result && getMapEntryList() .equals(other.getMapEntryList()); @@ -136,9 +243,13 @@ public final class HFileProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (getMapEntryCount() > 0) { @@ -146,89 +257,83 @@ public final class HFileProtos { hash = (53 * hash) + getMapEntryList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code FileInfoProto} + * + *
+     * Map of name/values
+     * &lt;/pre&gt;
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProtoOrBuilder { @@ -236,18 +341,21 @@ public final class HFileProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HFileProtos.internal_static_FileInfoProto_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HFileProtos.internal_static_FileInfoProto_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.HFileProtos.internal_static_FileInfoProto_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto.class, org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -259,7 +367,7 @@ public final class HFileProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (mapEntryBuilder_ == null) { @@ -270,20 +378,20 @@ public final class HFileProtos { } return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.HFileProtos.internal_static_FileInfoProto_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto build() { org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto result = buildPartial(); if (!result.isInitialized()) { @@ -291,17 +399,7 @@ public final class HFileProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto buildPartial() { org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto result = new org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto(this); int from_bitField0_ = bitField0_; @@ -317,7 +415,7 @@ public final class HFileProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto)other); @@ -326,7 +424,7 @@ public final class HFileProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto other) { if 
(other == org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto.getDefaultInstance()) return this; if (mapEntryBuilder_ == null) { @@ -358,7 +456,7 @@ public final class HFileProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { for (int i = 0; i < getMapEntryCount(); i++) { if (!getMapEntry(i).isInitialized()) { @@ -368,42 +466,26 @@ public final class HFileProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addMapEntry(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // repeated .BytesBytesPair mapEntry = 1; private java.util.List mapEntry_ = java.util.Collections.emptyList(); @@ -413,10 +495,13 @@ public final class HFileProtos { bitField0_ |= 0x00000001; } } - + private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> mapEntryBuilder_; - + + /** + * repeated .BytesBytesPair mapEntry = 1; + */ public java.util.List getMapEntryList() { if (mapEntryBuilder_ == null) { return java.util.Collections.unmodifiableList(mapEntry_); @@ -424,6 +509,9 @@ public final class HFileProtos { return mapEntryBuilder_.getMessageList(); } } + /** + * repeated .BytesBytesPair mapEntry = 1; + */ public int getMapEntryCount() { if (mapEntryBuilder_ == null) { return mapEntry_.size(); @@ -431,6 +519,9 @@ public final class HFileProtos { return mapEntryBuilder_.getCount(); } } + /** + * repeated .BytesBytesPair mapEntry = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getMapEntry(int index) { if (mapEntryBuilder_ == null) { return mapEntry_.get(index); @@ -438,6 +529,9 @@ public final class HFileProtos { return mapEntryBuilder_.getMessage(index); } } + /** + * repeated .BytesBytesPair mapEntry = 1; + */ public Builder setMapEntry( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) { if (mapEntryBuilder_ == null) { @@ -452,6 +546,9 @@ public final class HFileProtos { } return this; } + /** + * repeated .BytesBytesPair mapEntry = 1; + 
*/ public Builder setMapEntry( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) { if (mapEntryBuilder_ == null) { @@ -463,6 +560,9 @@ public final class HFileProtos { } return this; } + /** + * repeated .BytesBytesPair mapEntry = 1; + */ public Builder addMapEntry(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) { if (mapEntryBuilder_ == null) { if (value == null) { @@ -476,6 +576,9 @@ public final class HFileProtos { } return this; } + /** + * repeated .BytesBytesPair mapEntry = 1; + */ public Builder addMapEntry( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) { if (mapEntryBuilder_ == null) { @@ -490,6 +593,9 @@ public final class HFileProtos { } return this; } + /** + * repeated .BytesBytesPair mapEntry = 1; + */ public Builder addMapEntry( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) { if (mapEntryBuilder_ == null) { @@ -501,6 +607,9 @@ public final class HFileProtos { } return this; } + /** + * repeated .BytesBytesPair mapEntry = 1; + */ public Builder addMapEntry( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) { if (mapEntryBuilder_ == null) { @@ -512,6 +621,9 @@ public final class HFileProtos { } return this; } + /** + * repeated .BytesBytesPair mapEntry = 1; + */ public Builder addAllMapEntry( java.lang.Iterable values) { if (mapEntryBuilder_ == null) { @@ -523,6 +635,9 @@ public final class HFileProtos { } return this; } + /** + * repeated .BytesBytesPair mapEntry = 1; + */ public Builder clearMapEntry() { if (mapEntryBuilder_ == null) { mapEntry_ = java.util.Collections.emptyList(); @@ -533,6 +648,9 @@ public final class HFileProtos { } return this; } + /** + * repeated .BytesBytesPair mapEntry = 1; + */ public Builder removeMapEntry(int index) { if (mapEntryBuilder_ == null) { ensureMapEntryIsMutable(); @@ -543,10 +661,16 @@ public final class HFileProtos { } return this; } + /** + * repeated .BytesBytesPair mapEntry = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder getMapEntryBuilder( int index) { return getMapEntryFieldBuilder().getBuilder(index); } + /** + * repeated .BytesBytesPair mapEntry = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getMapEntryOrBuilder( int index) { if (mapEntryBuilder_ == null) { @@ -554,6 +678,9 @@ public final class HFileProtos { return mapEntryBuilder_.getMessageOrBuilder(index); } } + /** + * repeated .BytesBytesPair mapEntry = 1; + */ public java.util.List getMapEntryOrBuilderList() { if (mapEntryBuilder_ != null) { @@ -562,15 +689,24 @@ public final class HFileProtos { return java.util.Collections.unmodifiableList(mapEntry_); } } + /** + * repeated .BytesBytesPair mapEntry = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder addMapEntryBuilder() { return getMapEntryFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance()); } + /** + * repeated .BytesBytesPair mapEntry = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder addMapEntryBuilder( int index) { return getMapEntryFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance()); } + /** + * repeated .BytesBytesPair mapEntry = 1; + */ public java.util.List 
getMapEntryBuilderList() { return getMapEntryFieldBuilder().getBuilderList(); @@ -589,240 +725,520 @@ public final class HFileProtos { } return mapEntryBuilder_; } - + // @@protoc_insertion_point(builder_scope:FileInfoProto) } - + static { defaultInstance = new FileInfoProto(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:FileInfoProto) } - + public interface FileTrailerProtoOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // optional uint64 fileInfoOffset = 1; + /** + * optional uint64 fileInfoOffset = 1; + */ boolean hasFileInfoOffset(); + /** + * optional uint64 fileInfoOffset = 1; + */ long getFileInfoOffset(); - + // optional uint64 loadOnOpenDataOffset = 2; + /** + * optional uint64 loadOnOpenDataOffset = 2; + */ boolean hasLoadOnOpenDataOffset(); + /** + * optional uint64 loadOnOpenDataOffset = 2; + */ long getLoadOnOpenDataOffset(); - + // optional uint64 uncompressedDataIndexSize = 3; + /** + * optional uint64 uncompressedDataIndexSize = 3; + */ boolean hasUncompressedDataIndexSize(); + /** + * optional uint64 uncompressedDataIndexSize = 3; + */ long getUncompressedDataIndexSize(); - + // optional uint64 totalUncompressedBytes = 4; + /** + * optional uint64 totalUncompressedBytes = 4; + */ boolean hasTotalUncompressedBytes(); + /** + * optional uint64 totalUncompressedBytes = 4; + */ long getTotalUncompressedBytes(); - + // optional uint32 dataIndexCount = 5; + /** + * optional uint32 dataIndexCount = 5; + */ boolean hasDataIndexCount(); + /** + * optional uint32 dataIndexCount = 5; + */ int getDataIndexCount(); - + // optional uint32 metaIndexCount = 6; + /** + * optional uint32 metaIndexCount = 6; + */ boolean hasMetaIndexCount(); + /** + * optional uint32 metaIndexCount = 6; + */ int getMetaIndexCount(); - + // optional uint64 entryCount = 7; + /** + * optional uint64 entryCount = 7; + */ boolean hasEntryCount(); + /** + * optional uint64 entryCount = 7; + */ long getEntryCount(); - + // optional uint32 numDataIndexLevels = 8; + /** + * optional uint32 numDataIndexLevels = 8; + */ boolean hasNumDataIndexLevels(); + /** + * optional uint32 numDataIndexLevels = 8; + */ int getNumDataIndexLevels(); - + // optional uint64 firstDataBlockOffset = 9; + /** + * optional uint64 firstDataBlockOffset = 9; + */ boolean hasFirstDataBlockOffset(); + /** + * optional uint64 firstDataBlockOffset = 9; + */ long getFirstDataBlockOffset(); - + // optional uint64 lastDataBlockOffset = 10; + /** + * optional uint64 lastDataBlockOffset = 10; + */ boolean hasLastDataBlockOffset(); + /** + * optional uint64 lastDataBlockOffset = 10; + */ long getLastDataBlockOffset(); - + // optional string comparatorClassName = 11; + /** + * optional string comparatorClassName = 11; + */ boolean hasComparatorClassName(); - String getComparatorClassName(); - + /** + * optional string comparatorClassName = 11; + */ + java.lang.String getComparatorClassName(); + /** + * optional string comparatorClassName = 11; + */ + com.google.protobuf.ByteString + getComparatorClassNameBytes(); + // optional uint32 compressionCodec = 12; + /** + * optional uint32 compressionCodec = 12; + */ boolean hasCompressionCodec(); + /** + * optional uint32 compressionCodec = 12; + */ int getCompressionCodec(); } + /** + * Protobuf type {@code FileTrailerProto} + * + *
+   * HFile file trailer
+   * &lt;/pre&gt;
+ */ public static final class FileTrailerProto extends com.google.protobuf.GeneratedMessage implements FileTrailerProtoOrBuilder { // Use FileTrailerProto.newBuilder() to construct. - private FileTrailerProto(Builder builder) { + private FileTrailerProto(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private FileTrailerProto(boolean noInit) {} - + private FileTrailerProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final FileTrailerProto defaultInstance; public static FileTrailerProto getDefaultInstance() { return defaultInstance; } - + public FileTrailerProto getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private FileTrailerProto( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + fileInfoOffset_ = input.readUInt64(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + loadOnOpenDataOffset_ = input.readUInt64(); + break; + } + case 24: { + bitField0_ |= 0x00000004; + uncompressedDataIndexSize_ = input.readUInt64(); + break; + } + case 32: { + bitField0_ |= 0x00000008; + totalUncompressedBytes_ = input.readUInt64(); + break; + } + case 40: { + bitField0_ |= 0x00000010; + dataIndexCount_ = input.readUInt32(); + break; + } + case 48: { + bitField0_ |= 0x00000020; + metaIndexCount_ = input.readUInt32(); + break; + } + case 56: { + bitField0_ |= 0x00000040; + entryCount_ = input.readUInt64(); + break; + } + case 64: { + bitField0_ |= 0x00000080; + numDataIndexLevels_ = input.readUInt32(); + break; + } + case 72: { + bitField0_ |= 0x00000100; + firstDataBlockOffset_ = input.readUInt64(); + break; + } + case 80: { + bitField0_ |= 0x00000200; + lastDataBlockOffset_ = input.readUInt64(); + break; + } + case 90: { + bitField0_ |= 0x00000400; + comparatorClassName_ = input.readBytes(); + break; + } + case 96: { + bitField0_ |= 0x00000800; + compressionCodec_ = input.readUInt32(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HFileProtos.internal_static_FileTrailerProto_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HFileProtos.internal_static_FileTrailerProto_fieldAccessorTable; + return 
org.apache.hadoop.hbase.protobuf.generated.HFileProtos.internal_static_FileTrailerProto_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto.class, org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public FileTrailerProto parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new FileTrailerProto(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // optional uint64 fileInfoOffset = 1; public static final int FILEINFOOFFSET_FIELD_NUMBER = 1; private long fileInfoOffset_; + /** + * optional uint64 fileInfoOffset = 1; + */ public boolean hasFileInfoOffset() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional uint64 fileInfoOffset = 1; + */ public long getFileInfoOffset() { return fileInfoOffset_; } - + // optional uint64 loadOnOpenDataOffset = 2; public static final int LOADONOPENDATAOFFSET_FIELD_NUMBER = 2; private long loadOnOpenDataOffset_; + /** + * optional uint64 loadOnOpenDataOffset = 2; + */ public boolean hasLoadOnOpenDataOffset() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional uint64 loadOnOpenDataOffset = 2; + */ public long getLoadOnOpenDataOffset() { return loadOnOpenDataOffset_; } - + // optional uint64 uncompressedDataIndexSize = 3; public static final int UNCOMPRESSEDDATAINDEXSIZE_FIELD_NUMBER = 3; private long uncompressedDataIndexSize_; + /** + * optional uint64 uncompressedDataIndexSize = 3; + */ public boolean hasUncompressedDataIndexSize() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional uint64 uncompressedDataIndexSize = 3; + */ public long getUncompressedDataIndexSize() { return uncompressedDataIndexSize_; } - + // optional uint64 totalUncompressedBytes = 4; public static final int TOTALUNCOMPRESSEDBYTES_FIELD_NUMBER = 4; private long totalUncompressedBytes_; + /** + * optional uint64 totalUncompressedBytes = 4; + */ public boolean hasTotalUncompressedBytes() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * optional uint64 totalUncompressedBytes = 4; + */ public long getTotalUncompressedBytes() { return totalUncompressedBytes_; } - + // optional uint32 dataIndexCount = 5; public static final int DATAINDEXCOUNT_FIELD_NUMBER = 5; private int dataIndexCount_; + /** + * optional uint32 dataIndexCount = 5; + */ public boolean hasDataIndexCount() { return ((bitField0_ & 0x00000010) == 0x00000010); } + /** + * optional uint32 dataIndexCount = 5; + */ public int getDataIndexCount() { return dataIndexCount_; } - + // optional uint32 metaIndexCount = 6; public static final int METAINDEXCOUNT_FIELD_NUMBER = 6; private int metaIndexCount_; + /** + * optional uint32 metaIndexCount = 6; + */ public boolean hasMetaIndexCount() { return ((bitField0_ & 0x00000020) == 0x00000020); } + /** + * optional uint32 metaIndexCount = 6; + */ public int getMetaIndexCount() { return metaIndexCount_; } - + // optional uint64 entryCount = 7; public static final int ENTRYCOUNT_FIELD_NUMBER = 7; private long entryCount_; + /** + * optional uint64 entryCount = 7; + */ public boolean hasEntryCount() { return ((bitField0_ & 0x00000040) == 0x00000040); } + 
/** + * optional uint64 entryCount = 7; + */ public long getEntryCount() { return entryCount_; } - + // optional uint32 numDataIndexLevels = 8; public static final int NUMDATAINDEXLEVELS_FIELD_NUMBER = 8; private int numDataIndexLevels_; + /** + * optional uint32 numDataIndexLevels = 8; + */ public boolean hasNumDataIndexLevels() { return ((bitField0_ & 0x00000080) == 0x00000080); } + /** + * optional uint32 numDataIndexLevels = 8; + */ public int getNumDataIndexLevels() { return numDataIndexLevels_; } - + // optional uint64 firstDataBlockOffset = 9; public static final int FIRSTDATABLOCKOFFSET_FIELD_NUMBER = 9; private long firstDataBlockOffset_; + /** + * optional uint64 firstDataBlockOffset = 9; + */ public boolean hasFirstDataBlockOffset() { return ((bitField0_ & 0x00000100) == 0x00000100); } + /** + * optional uint64 firstDataBlockOffset = 9; + */ public long getFirstDataBlockOffset() { return firstDataBlockOffset_; } - + // optional uint64 lastDataBlockOffset = 10; public static final int LASTDATABLOCKOFFSET_FIELD_NUMBER = 10; private long lastDataBlockOffset_; + /** + * optional uint64 lastDataBlockOffset = 10; + */ public boolean hasLastDataBlockOffset() { return ((bitField0_ & 0x00000200) == 0x00000200); } + /** + * optional uint64 lastDataBlockOffset = 10; + */ public long getLastDataBlockOffset() { return lastDataBlockOffset_; } - + // optional string comparatorClassName = 11; public static final int COMPARATORCLASSNAME_FIELD_NUMBER = 11; private java.lang.Object comparatorClassName_; + /** + * optional string comparatorClassName = 11; + */ public boolean hasComparatorClassName() { return ((bitField0_ & 0x00000400) == 0x00000400); } - public String getComparatorClassName() { + /** + * optional string comparatorClassName = 11; + */ + public java.lang.String getComparatorClassName() { java.lang.Object ref = comparatorClassName_; - if (ref instanceof String) { - return (String) ref; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { comparatorClassName_ = s; } return s; } } - private com.google.protobuf.ByteString getComparatorClassNameBytes() { + /** + * optional string comparatorClassName = 11; + */ + public com.google.protobuf.ByteString + getComparatorClassNameBytes() { java.lang.Object ref = comparatorClassName_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); comparatorClassName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + // optional uint32 compressionCodec = 12; public static final int COMPRESSIONCODEC_FIELD_NUMBER = 12; private int compressionCodec_; + /** + * optional uint32 compressionCodec = 12; + */ public boolean hasCompressionCodec() { return ((bitField0_ & 0x00000800) == 0x00000800); } + /** + * optional uint32 compressionCodec = 12; + */ public int getCompressionCodec() { return compressionCodec_; } - + private void initFields() { fileInfoOffset_ = 0L; loadOnOpenDataOffset_ = 0L; @@ -841,11 +1257,11 @@ public final class HFileProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + 
memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -887,12 +1303,12 @@ public final class HFileProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -946,14 +1362,14 @@ public final class HFileProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -963,7 +1379,7 @@ public final class HFileProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto other = (org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto) obj; - + boolean result = true; result = result && (hasFileInfoOffset() == other.hasFileInfoOffset()); if (hasFileInfoOffset()) { @@ -1029,9 +1445,13 @@ public final class HFileProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasFileInfoOffset()) { @@ -1083,89 +1503,83 @@ public final class HFileProtos { hash = (53 * hash) + getCompressionCodec(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code FileTrailerProto} + * + *
+     * HFile file trailer
+     * &lt;/pre&gt;
+     */
     public static final class Builder extends
         com.google.protobuf.GeneratedMessage.Builder<Builder>
        implements org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProtoOrBuilder {
@@ -1173,18 +1587,21 @@ public final class HFileProtos {
           getDescriptor() {
         return org.apache.hadoop.hbase.protobuf.generated.HFileProtos.internal_static_FileTrailerProto_descriptor;
       }
-    
+
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.HFileProtos.internal_static_FileTrailerProto_fieldAccessorTable;
+        return org.apache.hadoop.hbase.protobuf.generated.HFileProtos.internal_static_FileTrailerProto_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto.class, org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto.Builder.class);
       }
-    
+
       // Construct using org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto.newBuilder()
       private Builder() {
         maybeForceBuilderInitialization();
       }
-    
-      private Builder(BuilderParent parent) {
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
         super(parent);
         maybeForceBuilderInitialization();
       }
@@ -1195,7 +1612,7 @@
       private static Builder create() {
         return new Builder();
       }
-    
+
       public Builder clear() {
         super.clear();
         fileInfoOffset_ = 0L;
@@ -1224,20 +1641,20 @@
         bitField0_ = (bitField0_ & ~0x00000800);
         return this;
       }
-    
+
       public Builder clone() {
         return create().mergeFrom(buildPartial());
       }
-    
+
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto.getDescriptor();
+        return org.apache.hadoop.hbase.protobuf.generated.HFileProtos.internal_static_FileTrailerProto_descriptor;
       }
-    
+
       public org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto getDefaultInstanceForType() {
         return org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto.getDefaultInstance();
       }
-    
+
       public org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto build() {
         org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto result = buildPartial();
         if (!result.isInitialized()) {
@@ -1245,17 +1662,7 @@
         }
         return result;
       }
-    
-      private org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto buildParsed()
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(
-            result).asInvalidProtocolBufferException();
-        }
-        return result;
-      }
-    
+
       public org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto buildPartial() {
         org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto result = new org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto(this);
         int from_bitField0_ = bitField0_;
@@ -1312,7 +1719,7 @@
         onBuilt();
         return result;
       }
-    
+
       public Builder mergeFrom(com.google.protobuf.Message other) {
         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto) {
           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto)other);
@@ -1321,7 +1728,7 @@
           return this;
         }
       }
-    
+
       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto other) {
         if (other == org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto.getDefaultInstance()) return this;
         if (other.hasFileInfoOffset()) {
@@ -1355,7 +1762,9 @@
           setLastDataBlockOffset(other.getLastDataBlockOffset());
         }
         if (other.hasComparatorClassName()) {
-          setComparatorClassName(other.getComparatorClassName());
+          bitField0_ |= 0x00000400;
+          comparatorClassName_ = other.comparatorClassName_;
+          onChanged();
         }
         if (other.hasCompressionCodec()) {
           setCompressionCodec(other.getCompressionCodec());
@@ -1363,326 +1772,403 @@
         this.mergeUnknownFields(other.getUnknownFields());
         return this;
       }
-    
+
       public final boolean isInitialized() {
         return true;
       }
-    
+
       public Builder mergeFrom(
           com.google.protobuf.CodedInputStream input,
           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
           throws java.io.IOException {
-        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder(
-            this.getUnknownFields());
-        while (true) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              this.setUnknownFields(unknownFields.build());
-              onChanged();
-              return this;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                this.setUnknownFields(unknownFields.build());
-                onChanged();
-                return this;
-              }
-              break;
-            }
-            case 8: {
-              bitField0_ |= 0x00000001;
-              fileInfoOffset_ = input.readUInt64();
-              break;
-            }
-            case 16: {
-              bitField0_ |= 0x00000002;
-              loadOnOpenDataOffset_ = input.readUInt64();
-              break;
-            }
-            case 24: {
-              bitField0_ |= 0x00000004;
-              uncompressedDataIndexSize_ = input.readUInt64();
-              break;
-            }
-            case 32: {
-              bitField0_ |= 0x00000008;
-              totalUncompressedBytes_ = input.readUInt64();
-              break;
-            }
-            case 40: {
-              bitField0_ |= 0x00000010;
-              dataIndexCount_ = input.readUInt32();
-              break;
-            }
-            case 48: {
-              bitField0_ |= 0x00000020;
-              metaIndexCount_ = input.readUInt32();
-              break;
-            }
-            case 56: {
-              bitField0_ |= 0x00000040;
-              entryCount_ = input.readUInt64();
-              break;
-            }
-            case 64: {
-              bitField0_ |= 0x00000080;
-              numDataIndexLevels_ = input.readUInt32();
-              break;
-            }
-            case 72: {
-              bitField0_ |= 0x00000100;
-              firstDataBlockOffset_ = input.readUInt64();
-              break;
-            }
-            case 80: {
-              bitField0_ |= 0x00000200;
-              lastDataBlockOffset_ = input.readUInt64();
-              break;
-            }
-            case 90: {
-              bitField0_ |= 0x00000400;
-              comparatorClassName_ = input.readBytes();
-              break;
-            }
-            case 96: {
-              bitField0_ |= 0x00000800;
-              compressionCodec_ = input.readUInt32();
-              break;
-            }
-          }
-        }
+        org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
       }
-
       private int bitField0_;
-    
+
       // optional uint64 fileInfoOffset = 1;
       private long fileInfoOffset_ ;
+      /**
+       * <code>optional uint64 fileInfoOffset = 1;</code>
+       */
       public boolean hasFileInfoOffset() {
         return ((bitField0_ & 0x00000001) == 0x00000001);
       }
+      /**
+       * <code>optional uint64 fileInfoOffset = 1;</code>
+       */
       public long getFileInfoOffset() {
         return fileInfoOffset_;
       }
+      /**
+       * <code>optional uint64 fileInfoOffset = 1;</code>
+       */
       public Builder setFileInfoOffset(long value) {
         bitField0_ |= 0x00000001;
         fileInfoOffset_ = value;
         onChanged();
         return this;
       }
+      /**
+       * <code>optional uint64 fileInfoOffset = 1;</code>
+       */
       public Builder clearFileInfoOffset() {
         bitField0_ = (bitField0_ & ~0x00000001);
         fileInfoOffset_ = 0L;
         onChanged();
         return this;
       }
-    
+
       // optional uint64 loadOnOpenDataOffset = 2;
       private long loadOnOpenDataOffset_ ;
+      /**
+       * <code>optional uint64 loadOnOpenDataOffset = 2;</code>
+       */
       public boolean hasLoadOnOpenDataOffset() {
         return ((bitField0_ & 0x00000002) == 0x00000002);
       }
+      /**
+       * <code>optional uint64 loadOnOpenDataOffset = 2;</code>
+       */
       public long getLoadOnOpenDataOffset() {
         return loadOnOpenDataOffset_;
       }
+      /**
+       * <code>optional uint64 loadOnOpenDataOffset = 2;</code>
+       */
       public Builder setLoadOnOpenDataOffset(long value) {
         bitField0_ |= 0x00000002;
         loadOnOpenDataOffset_ = value;
         onChanged();
         return this;
       }
+      /**
+       * <code>optional uint64 loadOnOpenDataOffset = 2;</code>
+       */
       public Builder clearLoadOnOpenDataOffset() {
         bitField0_ = (bitField0_ & ~0x00000002);
         loadOnOpenDataOffset_ = 0L;
         onChanged();
         return this;
       }
-    
+
       // optional uint64 uncompressedDataIndexSize = 3;
       private long uncompressedDataIndexSize_ ;
+      /**
+       * <code>optional uint64 uncompressedDataIndexSize = 3;</code>
+       */
       public boolean hasUncompressedDataIndexSize() {
         return ((bitField0_ & 0x00000004) == 0x00000004);
       }
+      /**
+       * <code>optional uint64 uncompressedDataIndexSize = 3;</code>
+       */
       public long getUncompressedDataIndexSize() {
         return uncompressedDataIndexSize_;
       }
+      /**
+       * <code>optional uint64 uncompressedDataIndexSize = 3;</code>
+       */
       public Builder setUncompressedDataIndexSize(long value) {
         bitField0_ |= 0x00000004;
         uncompressedDataIndexSize_ = value;
         onChanged();
         return this;
       }
+      /**
+       * <code>optional uint64 uncompressedDataIndexSize = 3;</code>
+       */
       public Builder clearUncompressedDataIndexSize() {
         bitField0_ = (bitField0_ & ~0x00000004);
         uncompressedDataIndexSize_ = 0L;
         onChanged();
         return this;
       }
-    
+
       // optional uint64 totalUncompressedBytes = 4;
       private long totalUncompressedBytes_ ;
+      /**
+       * <code>optional uint64 totalUncompressedBytes = 4;</code>
+       */
       public boolean hasTotalUncompressedBytes() {
         return ((bitField0_ & 0x00000008) == 0x00000008);
       }
+      /**
+       * <code>optional uint64 totalUncompressedBytes = 4;</code>
+       */
       public long getTotalUncompressedBytes() {
         return totalUncompressedBytes_;
       }
+      /**
+       * <code>optional uint64 totalUncompressedBytes = 4;</code>
+       */
       public Builder setTotalUncompressedBytes(long value) {
         bitField0_ |= 0x00000008;
         totalUncompressedBytes_ = value;
         onChanged();
         return this;
       }
+      /**
+       * <code>optional uint64 totalUncompressedBytes = 4;</code>
+       */
       public Builder clearTotalUncompressedBytes() {
         bitField0_ = (bitField0_ & ~0x00000008);
         totalUncompressedBytes_ = 0L;
         onChanged();
         return this;
       }
-    
+
       // optional uint32 dataIndexCount = 5;
       private int dataIndexCount_ ;
+      /**
+       * <code>optional uint32 dataIndexCount = 5;</code>
+       */
       public boolean hasDataIndexCount() {
         return ((bitField0_ & 0x00000010) == 0x00000010);
       }
+      /**
+       * <code>optional uint32 dataIndexCount = 5;</code>
+       */
       public int getDataIndexCount() {
         return dataIndexCount_;
       }
+      /**
+       * <code>optional uint32 dataIndexCount = 5;</code>
+       */
       public Builder setDataIndexCount(int value) {
         bitField0_ |= 0x00000010;
         dataIndexCount_ = value;
         onChanged();
         return this;
       }
+      /**
+       * <code>optional uint32 dataIndexCount = 5;</code>
+       */
       public Builder clearDataIndexCount() {
         bitField0_ = (bitField0_ & ~0x00000010);
         dataIndexCount_ = 0;
         onChanged();
         return this;
       }
-    
+
       // optional uint32 metaIndexCount = 6;
       private int metaIndexCount_ ;
+      /**
+       * <code>optional uint32 metaIndexCount = 6;</code>
+       */
       public boolean hasMetaIndexCount() {
         return ((bitField0_ & 0x00000020) == 0x00000020);
       }
+      /**
+       * <code>optional uint32 metaIndexCount = 6;</code>
+       */
       public int getMetaIndexCount() {
         return metaIndexCount_;
       }
+      /**
+       * <code>optional uint32 metaIndexCount = 6;</code>
+       */
       public Builder setMetaIndexCount(int value) {
         bitField0_ |= 0x00000020;
         metaIndexCount_ = value;
         onChanged();
         return this;
       }
+      /**
+       * <code>optional uint32 metaIndexCount = 6;</code>
+       */
       public Builder clearMetaIndexCount() {
         bitField0_ = (bitField0_ & ~0x00000020);
         metaIndexCount_ = 0;
         onChanged();
         return this;
       }
-    
+
       // optional uint64 entryCount = 7;
       private long entryCount_ ;
+      /**
+       * <code>optional uint64 entryCount = 7;</code>
+       */
       public boolean hasEntryCount() {
         return ((bitField0_ & 0x00000040) == 0x00000040);
       }
+      /**
+       * <code>optional uint64 entryCount = 7;</code>
+       */
       public long getEntryCount() {
         return entryCount_;
       }
+      /**
+       * <code>optional uint64 entryCount = 7;</code>
+       */
       public Builder setEntryCount(long value) {
         bitField0_ |= 0x00000040;
         entryCount_ = value;
         onChanged();
         return this;
       }
+      /**
+       * <code>optional uint64 entryCount = 7;</code>
+       */
       public Builder clearEntryCount() {
         bitField0_ = (bitField0_ & ~0x00000040);
         entryCount_ = 0L;
         onChanged();
         return this;
       }
-    
+
       // optional uint32 numDataIndexLevels = 8;
       private int numDataIndexLevels_ ;
+      /**
+       * <code>optional uint32 numDataIndexLevels = 8;</code>
+       */
       public boolean hasNumDataIndexLevels() {
         return ((bitField0_ & 0x00000080) == 0x00000080);
       }
+      /**
+       * <code>optional uint32 numDataIndexLevels = 8;</code>
+       */
       public int getNumDataIndexLevels() {
         return numDataIndexLevels_;
       }
+      /**
+       * <code>optional uint32 numDataIndexLevels = 8;</code>
+       */
       public Builder setNumDataIndexLevels(int value) {
         bitField0_ |= 0x00000080;
         numDataIndexLevels_ = value;
         onChanged();
         return this;
       }
+      /**
+       * <code>optional uint32 numDataIndexLevels = 8;</code>
+       */
       public Builder clearNumDataIndexLevels() {
         bitField0_ = (bitField0_ & ~0x00000080);
         numDataIndexLevels_ = 0;
         onChanged();
         return this;
       }
-    
+
       // optional uint64 firstDataBlockOffset = 9;
       private long firstDataBlockOffset_ ;
+      /**
+       * <code>optional uint64 firstDataBlockOffset = 9;</code>
+       */
       public boolean hasFirstDataBlockOffset() {
         return ((bitField0_ & 0x00000100) == 0x00000100);
       }
+      /**
+       * <code>optional uint64 firstDataBlockOffset = 9;</code>
+       */
       public long getFirstDataBlockOffset() {
         return firstDataBlockOffset_;
       }
+      /**
+       * <code>optional uint64 firstDataBlockOffset = 9;</code>
+       */
       public Builder setFirstDataBlockOffset(long value) {
         bitField0_ |= 0x00000100;
         firstDataBlockOffset_ = value;
         onChanged();
         return this;
       }
+      /**
+       * <code>optional uint64 firstDataBlockOffset = 9;</code>
+       */
       public Builder clearFirstDataBlockOffset() {
         bitField0_ = (bitField0_ & ~0x00000100);
         firstDataBlockOffset_ = 0L;
         onChanged();
         return this;
       }
-    
+
       // optional uint64 lastDataBlockOffset = 10;
       private long lastDataBlockOffset_ ;
+      /**
+       * <code>optional uint64 lastDataBlockOffset = 10;</code>
+       */
       public boolean hasLastDataBlockOffset() {
         return ((bitField0_ & 0x00000200) == 0x00000200);
       }
+      /**
+       * <code>optional uint64 lastDataBlockOffset = 10;</code>
+       */
       public long getLastDataBlockOffset() {
         return lastDataBlockOffset_;
       }
+      /**
+       * <code>optional uint64 lastDataBlockOffset = 10;</code>
+       */
       public Builder setLastDataBlockOffset(long value) {
         bitField0_ |= 0x00000200;
         lastDataBlockOffset_ = value;
         onChanged();
         return this;
       }
+      /**
+       * <code>optional uint64 lastDataBlockOffset = 10;</code>
+       */
       public Builder clearLastDataBlockOffset() {
         bitField0_ = (bitField0_ & ~0x00000200);
         lastDataBlockOffset_ = 0L;
         onChanged();
         return this;
       }
-    
+
       // optional string comparatorClassName = 11;
       private java.lang.Object comparatorClassName_ = "";
+      /**
+       * <code>optional string comparatorClassName = 11;</code>
+       */
       public boolean hasComparatorClassName() {
         return ((bitField0_ & 0x00000400) == 0x00000400);
       }
-      public String getComparatorClassName() {
+      /**
+       * <code>optional string comparatorClassName = 11;</code>
+       */
+      public java.lang.String getComparatorClassName() {
         java.lang.Object ref = comparatorClassName_;
-        if (!(ref instanceof String)) {
-          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
+        if (!(ref instanceof java.lang.String)) {
+          java.lang.String s = ((com.google.protobuf.ByteString) ref)
+              .toStringUtf8();
           comparatorClassName_ = s;
           return s;
         } else {
-          return (String) ref;
+          return (java.lang.String) ref;
         }
       }
-      public Builder setComparatorClassName(String value) {
+      /**
+       * <code>optional string comparatorClassName = 11;</code>
+       */
+      public com.google.protobuf.ByteString
+          getComparatorClassNameBytes() {
+        java.lang.Object ref = comparatorClassName_;
+        if (ref instanceof String) {
+          com.google.protobuf.ByteString b =
+              com.google.protobuf.ByteString.copyFromUtf8(
+                  (java.lang.String) ref);
+          comparatorClassName_ = b;
+          return b;
+        } else {
+          return (com.google.protobuf.ByteString) ref;
+        }
+      }
+      /**
+       * <code>optional string comparatorClassName = 11;</code>
+       */
+      public Builder setComparatorClassName(
+          java.lang.String value) {
         if (value == null) {
           throw new NullPointerException();
         }
@@ -1691,50 +2177,73 @@
         onChanged();
         return this;
       }
+      /**
+       * <code>optional string comparatorClassName = 11;</code>
+       */
       public Builder clearComparatorClassName() {
         bitField0_ = (bitField0_ & ~0x00000400);
         comparatorClassName_ = getDefaultInstance().getComparatorClassName();
         onChanged();
         return this;
       }
-      void setComparatorClassName(com.google.protobuf.ByteString value) {
-        bitField0_ |= 0x00000400;
+      /**
+       * <code>optional string comparatorClassName = 11;</code>
+       */
+      public Builder setComparatorClassNameBytes(
+          com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000400;
         comparatorClassName_ = value;
         onChanged();
+        return this;
       }
-    
+
       // optional uint32 compressionCodec = 12;
       private int compressionCodec_ ;
+      /**
+       * <code>optional uint32 compressionCodec = 12;</code>
+       */
       public boolean hasCompressionCodec() {
         return ((bitField0_ & 0x00000800) == 0x00000800);
       }
+      /**
+       * <code>optional uint32 compressionCodec = 12;</code>
+       */
       public int getCompressionCodec() {
         return compressionCodec_;
       }
+      /**
+       * <code>optional uint32 compressionCodec = 12;</code>
+       */
       public Builder setCompressionCodec(int value) {
         bitField0_ |= 0x00000800;
         compressionCodec_ = value;
         onChanged();
         return this;
       }
+      /**
+       * <code>optional uint32 compressionCodec = 12;</code>
+       */
       public Builder clearCompressionCodec() {
         bitField0_ = (bitField0_ & ~0x00000800);
         compressionCodec_ = 0;
         onChanged();
         return this;
       }
-    
+
       // @@protoc_insertion_point(builder_scope:FileTrailerProto)
     }
-    
+
     static {
       defaultInstance = new FileTrailerProto(true);
       defaultInstance.initFields();
     }
-    
+
     // @@protoc_insertion_point(class_scope:FileTrailerProto)
   }
-  
+
   private static com.google.protobuf.Descriptors.Descriptor
     internal_static_FileInfoProto_descriptor;
   private static
@@ -1745,7 +2254,7 @@
   private static
     com.google.protobuf.GeneratedMessage.FieldAccessorTable
       internal_static_FileTrailerProto_fieldAccessorTable;
-  
+
   public static com.google.protobuf.Descriptors.FileDescriptor
       getDescriptor() {
     return descriptor;
@@ -1778,17 +2287,13 @@
           internal_static_FileInfoProto_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_FileInfoProto_descriptor,
-              new java.lang.String[] { "MapEntry", },
-              org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto.class,
-              org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto.Builder.class);
+              new java.lang.String[] { "MapEntry", });
           internal_static_FileTrailerProto_descriptor =
             getDescriptor().getMessageTypes().get(1);
           internal_static_FileTrailerProto_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_FileTrailerProto_descriptor,
-              new java.lang.String[] { "FileInfoOffset", "LoadOnOpenDataOffset", "UncompressedDataIndexSize", "TotalUncompressedBytes", "DataIndexCount", "MetaIndexCount", "EntryCount", "NumDataIndexLevels", "FirstDataBlockOffset", "LastDataBlockOffset", "ComparatorClassName", "CompressionCodec", },
-              org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto.class,
-              org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto.Builder.class);
+              new java.lang.String[] { "FileInfoOffset", "LoadOnOpenDataOffset", "UncompressedDataIndexSize", "TotalUncompressedBytes", "DataIndexCount", "MetaIndexCount", "EntryCount", "NumDataIndexLevels", "FirstDataBlockOffset", "LastDataBlockOffset", "ComparatorClassName", "CompressionCodec", });
           return null;
         }
       };
@@ -1798,6 +2303,6 @@
         org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(),
       }, assigner);
   }
-  
+
   // @@protoc_insertion_point(outer_class_scope)
 }
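[Editor's note] Every hunk in the regenerated files above follows the same protobuf 2.4.1 → 2.5.0 pattern: the per-builder `buildParsed()` helper is deleted, a shared static `PARSER` is introduced, and the static `parseFrom(...)` overloads delegate to it. As a rough, hypothetical illustration of what this buys call sites (the trailer values below are invented; only the `FileTrailerProto` API visible in the hunks above is assumed):

```java
import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto;

public class TrailerParseExample {
  public static void main(String[] args) throws InvalidProtocolBufferException {
    // Build a trailer and serialize it (stands in for bytes read from an HFile).
    byte[] trailerBytes = FileTrailerProto.newBuilder()
        .setEntryCount(42L)
        .setComparatorClassName("org.apache.hadoop.hbase.KeyValue$KVComparator")
        .build()
        .toByteArray();

    // 2.4-era call site: FileTrailerProto.newBuilder().mergeFrom(trailerBytes).build()
    // 2.5 pattern: one shared, stateless parser, no intermediate Builder allocation.
    FileTrailerProto trailer = FileTrailerProto.PARSER.parseFrom(trailerBytes);
    System.out.println(trailer.getEntryCount());
  }
}
```

One practical consequence: a truncated or corrupt input now surfaces as an `InvalidProtocolBufferException` that carries the partially parsed message (via `getUnfinishedMessage()`), instead of leaving a half-merged builder behind.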
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/LoadBalancerProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/LoadBalancerProtos.java
index 48243e6..4f78018 100644
--- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/LoadBalancerProtos.java
+++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/LoadBalancerProtos.java
@@ -10,50 +10,129 @@ public final class LoadBalancerProtos {
   }
   public interface LoadBalancerStateOrBuilder
       extends com.google.protobuf.MessageOrBuilder {
-    
+
     // optional bool balancerOn = 1;
+    /**
+     * <code>optional bool balancerOn = 1;</code>
+     */
     boolean hasBalancerOn();
+    /**
+     * <code>optional bool balancerOn = 1;</code>
+     */
     boolean getBalancerOn();
   }
+  /**
+   * Protobuf type {@code LoadBalancerState}
+   */
   public static final class LoadBalancerState extends
       com.google.protobuf.GeneratedMessage
       implements LoadBalancerStateOrBuilder {
     // Use LoadBalancerState.newBuilder() to construct.
-    private LoadBalancerState(Builder builder) {
+    private LoadBalancerState(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
       super(builder);
+      this.unknownFields = builder.getUnknownFields();
     }
-    private LoadBalancerState(boolean noInit) {}
-    
+    private LoadBalancerState(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
     private static final LoadBalancerState defaultInstance;
     public static LoadBalancerState getDefaultInstance() {
       return defaultInstance;
     }
-    
+
     public LoadBalancerState getDefaultInstanceForType() {
       return defaultInstance;
     }
-    
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private LoadBalancerState(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 8: {
+              bitField0_ |= 0x00000001;
+              balancerOn_ = input.readBool();
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
       return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_LoadBalancerState_descriptor;
     }
-    
+
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_LoadBalancerState_fieldAccessorTable;
+      return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_LoadBalancerState_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.class, org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<LoadBalancerState> PARSER =
+        new com.google.protobuf.AbstractParser<LoadBalancerState>() {
+      public LoadBalancerState parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new LoadBalancerState(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<LoadBalancerState> getParserForType() {
+      return PARSER;
     }
-    
+
     private int bitField0_;
     // optional bool balancerOn = 1;
     public static final int BALANCERON_FIELD_NUMBER = 1;
     private boolean balancerOn_;
+    /**
+     * <code>optional bool balancerOn = 1;</code>
+     */
     public boolean hasBalancerOn() {
       return ((bitField0_ & 0x00000001) == 0x00000001);
     }
+    /**
+     * <code>optional bool balancerOn = 1;</code>
+     */
     public boolean getBalancerOn() {
       return balancerOn_;
     }
-    
+
     private void initFields() {
       balancerOn_ = false;
     }
@@ -61,11 +140,11 @@ public final class LoadBalancerProtos {
     public final boolean isInitialized() {
       byte isInitialized = memoizedIsInitialized;
       if (isInitialized != -1) return isInitialized == 1;
-    
+
       memoizedIsInitialized = 1;
       return true;
     }
-    
+
     public void writeTo(com.google.protobuf.CodedOutputStream output)
                         throws java.io.IOException {
       getSerializedSize();
@@ -74,12 +153,12 @@
       }
       getUnknownFields().writeTo(output);
     }
-    
+
     private int memoizedSerializedSize = -1;
     public int getSerializedSize() {
       int size = memoizedSerializedSize;
       if (size != -1) return size;
-    
+
       size = 0;
       if (((bitField0_ & 0x00000001) == 0x00000001)) {
         size += com.google.protobuf.CodedOutputStream
@@ -89,14 +168,14 @@
       memoizedSerializedSize = size;
       return size;
     }
-    
+
     private static final long serialVersionUID = 0L;
     @java.lang.Override
     protected java.lang.Object writeReplace()
         throws java.io.ObjectStreamException {
       return super.writeReplace();
     }
-    
+
     @java.lang.Override
     public boolean equals(final java.lang.Object obj) {
       if (obj == this) {
@@ -106,7 +185,7 @@
         return super.equals(obj);
       }
       org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState other = (org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState) obj;
-    
+
       boolean result = true;
       result = result && (hasBalancerOn() == other.hasBalancerOn());
       if (hasBalancerOn()) {
@@ -117,9 +196,13 @@
         getUnknownFields().equals(other.getUnknownFields());
       return result;
     }
-    
+
+    private int memoizedHashCode = 0;
     @java.lang.Override
     public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
       int hash = 41;
       hash = (19 * hash) + getDescriptorForType().hashCode();
       if (hasBalancerOn()) {
@@ -127,89 +210,79 @@
         hash = (53 * hash) + hashBoolean(getBalancerOn());
       }
       hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
       return hash;
     }
-    
+
     public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom(
         com.google.protobuf.ByteString data)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom(
         com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom(byte[] data)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
    }
     public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom(
         byte[] data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom(java.io.InputStream input)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseDelimitedFrom(java.io.InputStream input)
         throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseDelimitedFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom(
         com.google.protobuf.CodedInputStream input)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
     }
-    
+
     public static Builder newBuilder() { return Builder.create(); }
     public Builder newBuilderForType() { return newBuilder(); }
     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState prototype) {
       return newBuilder().mergeFrom(prototype);
     }
     public Builder toBuilder() { return newBuilder(this); }
-    
+
     @java.lang.Override
     protected Builder newBuilderForType(
         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
       Builder builder = new Builder(parent);
       return builder;
     }
+    /**
+     * Protobuf type {@code LoadBalancerState}
+     */
     public static final class Builder extends
         com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerStateOrBuilder {
@@ -217,18 +290,21 @@
           getDescriptor() {
         return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_LoadBalancerState_descriptor;
       }
-    
+
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_LoadBalancerState_fieldAccessorTable;
+        return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_LoadBalancerState_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.class, org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.Builder.class);
       }
-    
+
       // Construct using org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.newBuilder()
       private Builder() {
         maybeForceBuilderInitialization();
       }
-    
-      private Builder(BuilderParent parent) {
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
         super(parent);
         maybeForceBuilderInitialization();
       }
@@ -239,27 +315,27 @@
       private static Builder create() {
         return new Builder();
       }
-    
+
       public Builder clear() {
         super.clear();
         balancerOn_ = false;
         bitField0_ = (bitField0_ & ~0x00000001);
         return this;
       }
-    
+
       public Builder clone() {
         return create().mergeFrom(buildPartial());
       }
-    
+
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.getDescriptor();
+        return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_LoadBalancerState_descriptor;
       }
-    
+
       public org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState getDefaultInstanceForType() {
         return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.getDefaultInstance();
       }
-    
+
       public org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState build() {
         org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState result = buildPartial();
         if (!result.isInitialized()) {
@@ -267,17 +343,7 @@
         }
         return result;
       }
-    
-      private org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState buildParsed()
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(
-            result).asInvalidProtocolBufferException();
-        }
-        return result;
-      }
-    
+
       public org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState buildPartial() {
         org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState result = new org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState(this);
         int from_bitField0_ = bitField0_;
@@ -290,7 +356,7 @@
         onBuilt();
         return result;
       }
-    
+
       public Builder mergeFrom(com.google.protobuf.Message other) {
         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState) {
           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState)other);
@@ -299,7 +365,7 @@
           return this;
         }
       }
-    
+
       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState other) {
         if (other == org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.getDefaultInstance()) return this;
         if (other.hasBalancerOn()) {
@@ -308,83 +374,80 @@
         this.mergeUnknownFields(other.getUnknownFields());
         return this;
       }
-    
+
       public final boolean isInitialized() {
         return true;
       }
-    
+
       public Builder mergeFrom(
           com.google.protobuf.CodedInputStream input,
           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
           throws java.io.IOException {
-        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder(
-            this.getUnknownFields());
-        while (true) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              this.setUnknownFields(unknownFields.build());
-              onChanged();
-              return this;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                this.setUnknownFields(unknownFields.build());
-                onChanged();
-                return this;
-              }
-              break;
-            }
-            case 8: {
-              bitField0_ |= 0x00000001;
-              balancerOn_ = input.readBool();
-              break;
-            }
-          }
-        }
+        org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
       }
-
       private int bitField0_;
-    
+
       // optional bool balancerOn = 1;
       private boolean balancerOn_ ;
+      /**
+       * <code>optional bool balancerOn = 1;</code>
+       */
      public boolean hasBalancerOn() {
         return ((bitField0_ & 0x00000001) == 0x00000001);
       }
+      /**
+       * <code>optional bool balancerOn = 1;</code>
+       */
       public boolean getBalancerOn() {
         return balancerOn_;
       }
+      /**
+       * <code>optional bool balancerOn = 1;</code>
+       */
       public Builder setBalancerOn(boolean value) {
         bitField0_ |= 0x00000001;
         balancerOn_ = value;
         onChanged();
         return this;
       }
+      /**
+       * <code>optional bool balancerOn = 1;</code>
+       */
       public Builder clearBalancerOn() {
         bitField0_ = (bitField0_ & ~0x00000001);
         balancerOn_ = false;
         onChanged();
         return this;
       }
-    
+
       // @@protoc_insertion_point(builder_scope:LoadBalancerState)
     }
-    
+
     static {
       defaultInstance = new LoadBalancerState(true);
       defaultInstance.initFields();
     }
-    
+
     // @@protoc_insertion_point(class_scope:LoadBalancerState)
   }
-  
+
   private static com.google.protobuf.Descriptors.Descriptor
     internal_static_LoadBalancerState_descriptor;
   private static
     com.google.protobuf.GeneratedMessage.FieldAccessorTable
       internal_static_LoadBalancerState_fieldAccessorTable;
-  
+
   public static com.google.protobuf.Descriptors.FileDescriptor
       getDescriptor() {
     return descriptor;
@@ -408,9 +471,7 @@
       internal_static_LoadBalancerState_fieldAccessorTable = new
         com.google.protobuf.GeneratedMessage.FieldAccessorTable(
           internal_static_LoadBalancerState_descriptor,
-          new java.lang.String[] { "BalancerOn", },
-          org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.class,
-          org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.Builder.class);
+          new java.lang.String[] { "BalancerOn", });
       return null;
     }
   };
@@ -419,6 +480,6 @@
       new com.google.protobuf.Descriptors.FileDescriptor[] {
       }, assigner);
   }
-  
+
   // @@protoc_insertion_point(outer_class_scope)
 }
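[Editor's note] Two behavioral points in the hunks above are easy to miss. First, `Builder.mergeFrom(CodedInputStream, ...)` no longer hand-rolls a tag-switch loop; it delegates to `PARSER.parsePartialFrom` and, on failure, re-merges whatever was decoded (via `InvalidProtocolBufferException.getUnfinishedMessage()`) before rethrowing. Second, the delimited-stream overloads now route through the parser but keep their old contract of returning `null` at end-of-stream. A minimal, hypothetical round-trip sketch, assuming only the regenerated `LoadBalancerState` above is on the classpath:

```java
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState;

public class DelimitedRoundTrip {
  public static void main(String[] args) throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    // Write two length-prefixed messages back to back.
    LoadBalancerState.newBuilder().setBalancerOn(true).build().writeDelimitedTo(out);
    LoadBalancerState.newBuilder().setBalancerOn(false).build().writeDelimitedTo(out);

    ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
    // parseDelimitedFrom now delegates to PARSER; it still yields null at EOF.
    LoadBalancerState msg;
    while ((msg = LoadBalancerState.parseDelimitedFrom(in)) != null) {
      System.out.println("balancerOn=" + msg.getBalancerOn());
    }
  }
}
```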
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MapReduceProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MapReduceProtos.java
index 5879e96..3d55de1 100644
--- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MapReduceProtos.java
+++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MapReduceProtos.java
@@ -10,66 +10,169 @@ public final class MapReduceProtos {
   }
   public interface ScanMetricsOrBuilder
       extends com.google.protobuf.MessageOrBuilder {
-    
+
     // repeated .NameInt64Pair metrics = 1;
+    /**
+     * <code>repeated .NameInt64Pair metrics = 1;</code>
+     */
     java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair>
         getMetricsList();
+    /**
+     * <code>repeated .NameInt64Pair metrics = 1;</code>
+     */
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair getMetrics(int index);
+    /**
+     * <code>repeated .NameInt64Pair metrics = 1;</code>
+     */
     int getMetricsCount();
+    /**
+     * <code>repeated .NameInt64Pair metrics = 1;</code>
+     */
     java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder>
         getMetricsOrBuilderList();
+    /**
+     * <code>repeated .NameInt64Pair metrics = 1;</code>
+     */
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder getMetricsOrBuilder(
         int index);
   }
+  /**
+   * Protobuf type {@code ScanMetrics}
+   */
   public static final class ScanMetrics extends
       com.google.protobuf.GeneratedMessage
       implements ScanMetricsOrBuilder {
     // Use ScanMetrics.newBuilder() to construct.
-    private ScanMetrics(Builder builder) {
+    private ScanMetrics(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
       super(builder);
+      this.unknownFields = builder.getUnknownFields();
     }
-    private ScanMetrics(boolean noInit) {}
-    
+    private ScanMetrics(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
     private static final ScanMetrics defaultInstance;
     public static ScanMetrics getDefaultInstance() {
       return defaultInstance;
     }
-    
+
     public ScanMetrics getDefaultInstanceForType() {
       return defaultInstance;
     }
-    
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private ScanMetrics(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 10: {
+              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+                metrics_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair>();
+                mutable_bitField0_ |= 0x00000001;
+              }
+              metrics_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.PARSER, extensionRegistry));
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+          metrics_ = java.util.Collections.unmodifiableList(metrics_);
+        }
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
       return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_ScanMetrics_descriptor;
     }
-    
+
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_ScanMetrics_fieldAccessorTable;
+      return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_ScanMetrics_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.class, org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<ScanMetrics> PARSER =
+        new com.google.protobuf.AbstractParser<ScanMetrics>() {
+      public ScanMetrics parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new ScanMetrics(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<ScanMetrics> getParserForType() {
+      return PARSER;
     }
-    
+
     // repeated .NameInt64Pair metrics = 1;
     public static final int METRICS_FIELD_NUMBER = 1;
     private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair> metrics_;
+    /**
+     * <code>repeated .NameInt64Pair metrics = 1;</code>
+     */
     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair> getMetricsList() {
       return metrics_;
     }
+    /**
+     * <code>repeated .NameInt64Pair metrics = 1;</code>
+     */
     public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder>
         getMetricsOrBuilderList() {
       return metrics_;
     }
+    /**
+     * <code>repeated .NameInt64Pair metrics = 1;</code>
+     */
     public int getMetricsCount() {
       return metrics_.size();
     }
+    /**
+     * <code>repeated .NameInt64Pair metrics = 1;</code>
+     */
     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair getMetrics(int index) {
       return metrics_.get(index);
     }
+    /**
+     * <code>repeated .NameInt64Pair metrics = 1;</code>
+     */
     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder getMetricsOrBuilder(
         int index) {
       return metrics_.get(index);
     }
-    
+
     private void initFields() {
       metrics_ = java.util.Collections.emptyList();
     }
@@ -77,11 +180,11 @@
     public final boolean isInitialized() {
       byte isInitialized = memoizedIsInitialized;
       if (isInitialized != -1) return isInitialized == 1;
-    
+
       memoizedIsInitialized = 1;
       return true;
     }
-    
+
     public void writeTo(com.google.protobuf.CodedOutputStream output)
                         throws java.io.IOException {
       getSerializedSize();
@@ -90,12 +193,12 @@
       }
       getUnknownFields().writeTo(output);
     }
-    
+
     private int memoizedSerializedSize = -1;
     public int getSerializedSize() {
       int size = memoizedSerializedSize;
       if (size != -1) return size;
-    
+
       size = 0;
       for (int i = 0; i < metrics_.size(); i++) {
         size += com.google.protobuf.CodedOutputStream
@@ -105,14 +208,14 @@
       memoizedSerializedSize = size;
       return size;
     }
-    
+
     private static final long serialVersionUID = 0L;
     @java.lang.Override
     protected java.lang.Object writeReplace()
         throws java.io.ObjectStreamException {
       return super.writeReplace();
     }
-    
+
     @java.lang.Override
     public boolean equals(final java.lang.Object obj) {
       if (obj == this) {
@@ -122,7 +225,7 @@
         return super.equals(obj);
       }
       org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics other = (org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics) obj;
-    
+
       boolean result = true;
       result = result && getMetricsList()
           .equals(other.getMetricsList());
@@ -130,9 +233,13 @@
         getUnknownFields().equals(other.getUnknownFields());
       return result;
     }
-    
+
+    private int memoizedHashCode = 0;
     @java.lang.Override
     public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
       int hash = 41;
       hash = (19 * hash) + getDescriptorForType().hashCode();
       if (getMetricsCount() > 0) {
@@ -140,89 +247,79 @@
         hash = (53 * hash) + getMetricsList().hashCode();
       }
       hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
       return hash;
     }
-    
+
     public static org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parseFrom(
         com.google.protobuf.ByteString data)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parseFrom(
         com.google.protobuf.ByteString data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parseFrom(byte[] data)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parseFrom(
         byte[] data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parseFrom(java.io.InputStream input)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parseFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parseDelimitedFrom(java.io.InputStream input)
         throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parseDelimitedFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
     public static org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parseFrom(
         com.google.protobuf.CodedInputStream input)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parseFrom(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
     }
-    
+
     public static Builder newBuilder() { return Builder.create(); }
     public Builder newBuilderForType() { return newBuilder(); }
     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics prototype) {
       return newBuilder().mergeFrom(prototype);
     }
     public Builder toBuilder() { return newBuilder(this); }
-    
+
     @java.lang.Override
     protected Builder newBuilderForType(
         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
       Builder builder = new Builder(parent);
       return builder;
     }
+    /**
+     * Protobuf type {@code ScanMetrics}
+     */
     public static final class Builder extends
         com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetricsOrBuilder {
@@ -230,18 +327,21 @@
           getDescriptor() {
         return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_ScanMetrics_descriptor;
       }
-    
+
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_ScanMetrics_fieldAccessorTable;
+        return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_ScanMetrics_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.class, org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.Builder.class);
       }
-    
+
       // Construct using org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.newBuilder()
       private Builder() {
         maybeForceBuilderInitialization();
       }
-    
-      private Builder(BuilderParent parent) {
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
         super(parent);
         maybeForceBuilderInitialization();
       }
@@ -253,7 +353,7 @@
       private static Builder create() {
         return new Builder();
       }
-    
+
       public Builder clear() {
         super.clear();
         if (metricsBuilder_ == null) {
@@ -264,20 +364,20 @@
         }
         return this;
       }
-    
+
       public Builder clone() {
         return create().mergeFrom(buildPartial());
       }
-    
+
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.getDescriptor();
+        return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_ScanMetrics_descriptor;
       }
-    
+
       public org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics getDefaultInstanceForType() {
         return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.getDefaultInstance();
       }
-    
+
       public org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics build() {
         org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics result = buildPartial();
         if (!result.isInitialized()) {
@@ -285,17 +385,7 @@
         }
         return result;
       }
-    
-      private org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics buildParsed()
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(
-            result).asInvalidProtocolBufferException();
-        }
-        return result;
-      }
-    
+
      public org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics buildPartial() {
         org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics result = new org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics(this);
         int from_bitField0_ = bitField0_;
@@ -311,7 +401,7 @@
         onBuilt();
         return result;
       }
-    
+
       public Builder mergeFrom(com.google.protobuf.Message other) {
         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics) {
           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics)other);
@@ -320,7 +410,7 @@
           return this;
         }
       }
-    
+
       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics other) {
         if (other == org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.getDefaultInstance()) return this;
         if (metricsBuilder_ == null) {
@@ -352,46 +442,30 @@
         this.mergeUnknownFields(other.getUnknownFields());
         return this;
       }
-    
+
       public final boolean isInitialized() {
         return true;
       }
-    
+
       public Builder mergeFrom(
           com.google.protobuf.CodedInputStream input,
           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
           throws java.io.IOException {
-        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder(
-            this.getUnknownFields());
-        while (true) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              this.setUnknownFields(unknownFields.build());
-              onChanged();
-              return this;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                this.setUnknownFields(unknownFields.build());
-                onChanged();
-                return this;
-              }
-              break;
-            }
-            case 10: {
-              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.newBuilder();
-              input.readMessage(subBuilder, extensionRegistry);
-              addMetrics(subBuilder.buildPartial());
-              break;
-            }
-          }
-        }
+        org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
       }
-
       private int bitField0_;
-    
+
       // repeated .NameInt64Pair metrics = 1;
       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair> metrics_ =
         java.util.Collections.emptyList();
@@ -401,10 +475,13 @@
           bitField0_ |= 0x00000001;
          }
       }
-    
+
       private com.google.protobuf.RepeatedFieldBuilder<
           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder> metricsBuilder_;
-    
+
+      /**
+       * <code>repeated .NameInt64Pair metrics = 1;</code>
+       */
       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair> getMetricsList() {
         if (metricsBuilder_ == null) {
           return java.util.Collections.unmodifiableList(metrics_);
@@ -412,6 +489,9 @@
           return metricsBuilder_.getMessageList();
         }
       }
+      /**
+       * <code>repeated .NameInt64Pair metrics = 1;</code>
+       */
       public int getMetricsCount() {
         if (metricsBuilder_ == null) {
           return metrics_.size();
@@ -419,6 +499,9 @@
           return metricsBuilder_.getCount();
         }
       }
+      /**
+       * <code>repeated .NameInt64Pair metrics = 1;</code>
+       */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair getMetrics(int index) {
         if (metricsBuilder_ == null) {
           return metrics_.get(index);
@@ -426,6 +509,9 @@
           return metricsBuilder_.getMessage(index);
         }
       }
+      /**
+       * <code>repeated .NameInt64Pair metrics = 1;</code>
+       */
       public Builder setMetrics(
           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair value) {
         if (metricsBuilder_ == null) {
@@ -440,6 +526,9 @@
         }
         return this;
       }
+      /**
+       * <code>repeated .NameInt64Pair metrics = 1;</code>
+       */
       public Builder setMetrics(
           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder builderForValue) {
         if (metricsBuilder_ == null) {
@@ -451,6 +540,9 @@
         }
         return this;
       }
+      /**
+       * <code>repeated .NameInt64Pair metrics = 1;</code>
+       */
       public Builder addMetrics(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair value) {
         if (metricsBuilder_ == null) {
           if (value == null) {
@@ -464,6 +556,9 @@
         }
         return this;
       }
+      /**
+       * <code>repeated .NameInt64Pair metrics = 1;</code>
+       */
       public Builder addMetrics(
           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair value) {
         if (metricsBuilder_ == null) {
@@ -478,6 +573,9 @@
         }
         return this;
       }
+      /**
+       * <code>repeated .NameInt64Pair metrics = 1;</code>
+       */
       public Builder addMetrics(
           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder builderForValue) {
         if (metricsBuilder_ == null) {
@@ -489,6 +587,9 @@
         }
         return this;
       }
+      /**
+       * <code>repeated .NameInt64Pair metrics = 1;</code>
+       */
       public Builder addMetrics(
           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder builderForValue) {
         if (metricsBuilder_ == null) {
@@ -500,6 +601,9 @@
         }
         return this;
       }
+      /**
+       * <code>repeated .NameInt64Pair metrics = 1;</code>
+       */
       public Builder addAllMetrics(
           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair> values) {
         if (metricsBuilder_ == null) {
@@ -511,6 +615,9 @@
         }
         return this;
       }
+      /**
+       * <code>repeated .NameInt64Pair metrics = 1;</code>
+       */
       public Builder clearMetrics() {
         if (metricsBuilder_ == null) {
           metrics_ = java.util.Collections.emptyList();
@@ -521,6 +628,9 @@
         }
         return this;
       }
+      /**
+       * <code>repeated .NameInt64Pair metrics = 1;</code>
+       */
       public Builder removeMetrics(int index) {
         if (metricsBuilder_ == null) {
           ensureMetricsIsMutable();
@@ -531,10 +641,16 @@
         }
         return this;
       }
+      /**
+       * <code>repeated .NameInt64Pair metrics = 1;</code>
+       */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder getMetricsBuilder(
           int index) {
         return getMetricsFieldBuilder().getBuilder(index);
       }
+      /**
+       * <code>repeated .NameInt64Pair metrics = 1;</code>
+       */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder getMetricsOrBuilder(
           int index) {
         if (metricsBuilder_ == null) {
@@ -542,6 +658,9 @@
           return metricsBuilder_.getMessageOrBuilder(index);
         }
       }
+      /**
+       * <code>repeated .NameInt64Pair metrics = 1;</code>
+       */
       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder>
           getMetricsOrBuilderList() {
         if (metricsBuilder_ != null) {
@@ -550,15 +669,24 @@
           return java.util.Collections.unmodifiableList(metrics_);
         }
       }
+      /**
+       * <code>repeated .NameInt64Pair metrics = 1;</code>
+       */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder addMetricsBuilder() {
         return getMetricsFieldBuilder().addBuilder(
             org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.getDefaultInstance());
       }
+      /**
+       * <code>repeated .NameInt64Pair metrics = 1;</code>
+       */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder addMetricsBuilder(
           int index) {
         return getMetricsFieldBuilder().addBuilder(
             index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.getDefaultInstance());
       }
+      /**
+       * <code>repeated .NameInt64Pair metrics = 1;</code>
+       */
       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder>
           getMetricsBuilderList() {
         return getMetricsFieldBuilder().getBuilderList();
@@ -577,24 +705,24 @@ public final class MapReduceProtos {
         }
         return metricsBuilder_;
       }
-    
+
       // @@protoc_insertion_point(builder_scope:ScanMetrics)
     }
-    
+
     static {
       defaultInstance = new ScanMetrics(true);
       defaultInstance.initFields();
     }
-    
+
     // @@protoc_insertion_point(class_scope:ScanMetrics)
   }
-  
+
   private static com.google.protobuf.Descriptors.Descriptor
     internal_static_ScanMetrics_descriptor;
   private static
     com.google.protobuf.GeneratedMessage.FieldAccessorTable
       internal_static_ScanMetrics_fieldAccessorTable;
-  
+
   public static com.google.protobuf.Descriptors.FileDescriptor
       getDescriptor() {
     return descriptor;
@@ -618,9 +746,7 @@
       internal_static_ScanMetrics_fieldAccessorTable = new
         com.google.protobuf.GeneratedMessage.FieldAccessorTable(
           internal_static_ScanMetrics_descriptor,
-          new java.lang.String[] { "Metrics", },
-          org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.class,
-          org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.Builder.class);
+          new java.lang.String[] { "Metrics", });
       return null;
     }
   };
@@ -630,6 +756,6 @@
       org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(),
     }, assigner);
   }
-  
+
   // @@protoc_insertion_point(outer_class_scope)
 }
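[Editor's note] For repeated fields such as `metrics`, the new message-parsing constructor above accumulates entries in a mutable `ArrayList` while reading tag 10 and wraps the list with `Collections.unmodifiableList` in its `finally` block, so a parsed `ScanMetrics` stays immutable even if parsing aborts midway. A minimal, hypothetical build-and-parse sketch (the counter names are invented for illustration; only the `ScanMetrics` and `NameInt64Pair` APIs visible in this patch are assumed):

```java
import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair;
import org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics;

public class ScanMetricsRoundTrip {
  public static void main(String[] args) throws InvalidProtocolBufferException {
    // Serialize two counters, then parse them back through the new static PARSER.
    byte[] bytes = ScanMetrics.newBuilder()
        .addMetrics(NameInt64Pair.newBuilder().setName("RPC_CALLS").setValue(7L))
        .addMetrics(NameInt64Pair.newBuilder().setName("ROWS_SCANNED").setValue(1024L))
        .build()
        .toByteArray();

    for (NameInt64Pair pair : ScanMetrics.PARSER.parseFrom(bytes).getMetricsList()) {
      // Both fields are optional on the wire, so guard before reading them.
      if (pair.hasName() && pair.hasValue()) {
        System.out.println(pair.getName() + " = " + pair.getValue());
      }
    }
  }
}
```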
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterAdminProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterAdminProtos.java
index 388c12c..4ad97e7 100644
--- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterAdminProtos.java
+++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterAdminProtos.java
@@ -10,68 +10,178 @@ public final class MasterAdminProtos {
   }
   public interface AddColumnRequestOrBuilder
       extends com.google.protobuf.MessageOrBuilder {
-    
+
     // required bytes tableName = 1;
+    /**
+     * <code>required bytes tableName = 1;</code>
+     */
     boolean hasTableName();
+    /**
+     * <code>required bytes tableName = 1;</code>
+     */
     com.google.protobuf.ByteString getTableName();
-    
+
     // required .ColumnFamilySchema columnFamilies = 2;
+    /**
+     * <code>required .ColumnFamilySchema columnFamilies = 2;</code>
+     */
     boolean hasColumnFamilies();
+    /**
+     * <code>required .ColumnFamilySchema columnFamilies = 2;</code>
+     */
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies();
+    /**
+     * <code>required .ColumnFamilySchema columnFamilies = 2;</code>
+     */
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder();
   }
+  /**
+   * Protobuf type {@code AddColumnRequest}
+   */
   public static final class AddColumnRequest extends
       com.google.protobuf.GeneratedMessage
       implements AddColumnRequestOrBuilder {
     // Use AddColumnRequest.newBuilder() to construct.
-    private AddColumnRequest(Builder builder) {
+    private AddColumnRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
       super(builder);
+      this.unknownFields = builder.getUnknownFields();
     }
-    private AddColumnRequest(boolean noInit) {}
-    
+    private AddColumnRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
     private static final AddColumnRequest defaultInstance;
     public static AddColumnRequest getDefaultInstance() {
       return defaultInstance;
     }
-    
+
     public AddColumnRequest getDefaultInstanceForType() {
       return defaultInstance;
     }
-    
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private AddColumnRequest(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 10: {
+              bitField0_ |= 0x00000001;
+              tableName_ = input.readBytes();
+              break;
+            }
+            case 18: {
+              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder subBuilder = null;
+              if (((bitField0_ & 0x00000002) == 0x00000002)) {
+                subBuilder = columnFamilies_.toBuilder();
+              }
+              columnFamilies_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.PARSER, extensionRegistry);
+              if (subBuilder != null) {
+                subBuilder.mergeFrom(columnFamilies_);
+                columnFamilies_ = subBuilder.buildPartial();
+              }
+              bitField0_ |= 0x00000002;
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
       return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AddColumnRequest_descriptor;
     }
-    
+
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AddColumnRequest_fieldAccessorTable;
+      return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AddColumnRequest_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<AddColumnRequest> PARSER =
+        new com.google.protobuf.AbstractParser<AddColumnRequest>() {
+      public AddColumnRequest parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new AddColumnRequest(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<AddColumnRequest> getParserForType() {
+      return PARSER;
    }
-    
+
     private int bitField0_;
     // required bytes tableName = 1;
     public static final int TABLENAME_FIELD_NUMBER = 1;
     private com.google.protobuf.ByteString tableName_;
+    /**
+     * <code>required bytes tableName = 1;</code>
+     */
     public boolean hasTableName() {
       return ((bitField0_ & 0x00000001) == 0x00000001);
     }
+    /**
+     * <code>required bytes tableName = 1;</code>
+     */
     public com.google.protobuf.ByteString getTableName() {
       return tableName_;
     }
-    
+
     // required .ColumnFamilySchema columnFamilies = 2;
     public static final int COLUMNFAMILIES_FIELD_NUMBER = 2;
     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema columnFamilies_;
+    /**
+     * <code>required .ColumnFamilySchema columnFamilies = 2;</code>
+     */
     public boolean hasColumnFamilies() {
       return ((bitField0_ & 0x00000002) == 0x00000002);
     }
+    /**
+     * <code>required .ColumnFamilySchema columnFamilies = 2;</code>
+     */
     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies() {
       return columnFamilies_;
     }
+    /**
+     * <code>required .ColumnFamilySchema columnFamilies = 2;</code>
+     */
     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder() {
       return columnFamilies_;
     }
-    
+
     private void initFields() {
       tableName_ = com.google.protobuf.ByteString.EMPTY;
       columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance();
@@ -80,7 +190,7 @@
     public final boolean isInitialized() {
       byte isInitialized = memoizedIsInitialized;
       if (isInitialized != -1) return isInitialized == 1;
-    
+
       if (!hasTableName()) {
         memoizedIsInitialized = 0;
         return false;
@@ -96,7 +206,7 @@
       memoizedIsInitialized = 1;
       return true;
     }
-    
+
     public void writeTo(com.google.protobuf.CodedOutputStream output)
                         throws java.io.IOException {
       getSerializedSize();
@@ -108,12 +218,12 @@
       }
       getUnknownFields().writeTo(output);
     }
-    
+
     private int memoizedSerializedSize = -1;
     public int getSerializedSize() {
       int size = memoizedSerializedSize;
       if (size != -1) return size;
-    
+
       size = 0;
       if (((bitField0_ & 0x00000001) == 0x00000001)) {
         size += com.google.protobuf.CodedOutputStream
@@ -127,14 +237,14 @@
       memoizedSerializedSize = size;
       return size;
     }
-    
+
     private static final long serialVersionUID = 0L;
     @java.lang.Override
     protected java.lang.Object writeReplace()
         throws java.io.ObjectStreamException {
       return super.writeReplace();
     }
-    
+
     @java.lang.Override
     public boolean equals(final java.lang.Object obj) {
       if (obj == this) {
@@ -144,7 +254,7 @@
         return super.equals(obj);
       }
       org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest) obj;
-    
+
       boolean result = true;
       result = result && (hasTableName() == other.hasTableName());
       if (hasTableName()) {
@@ -160,9 +270,13 @@
         getUnknownFields().equals(other.getUnknownFields());
       return result;
     }
-    
+
+    private int memoizedHashCode = 0;
     @java.lang.Override
     public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
       int hash = 41;
       hash = (19 * hash) + getDescriptorForType().hashCode();
       if (hasTableName()) {
@@ -174,89 +288,79 @@
         hash = (53 * hash) + getColumnFamilies().hashCode();
       }
       hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
       return hash;
     }
-    
+
     public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest parseFrom(
         com.google.protobuf.ByteString data)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest parseFrom(
         com.google.protobuf.ByteString data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest parseFrom(byte[] data)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest parseFrom(
         byte[] data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest parseFrom(java.io.InputStream input)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest parseFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest parseDelimitedFrom(java.io.InputStream input)
         throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest parseDelimitedFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest parseFrom(
         com.google.protobuf.CodedInputStream input)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
    }
     public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest parseFrom(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
     }
-    
+
     public static Builder newBuilder() { return Builder.create(); }
     public Builder newBuilderForType() { return newBuilder(); }
     public
static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code AddColumnRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequestOrBuilder { @@ -264,18 +368,21 @@ public final class MasterAdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AddColumnRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AddColumnRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AddColumnRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -287,7 +394,7 @@ public final class MasterAdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); tableName_ = com.google.protobuf.ByteString.EMPTY; @@ -300,20 +407,20 @@ public final class MasterAdminProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AddColumnRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest result = buildPartial(); if (!result.isInitialized()) { @@ -321,17 +428,7 @@ public final class MasterAdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest buildPartial() { 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest(this); int from_bitField0_ = bitField0_; @@ -352,7 +449,7 @@ public final class MasterAdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest)other); @@ -361,7 +458,7 @@ public final class MasterAdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest.getDefaultInstance()) return this; if (other.hasTableName()) { @@ -373,7 +470,7 @@ public final class MasterAdminProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasTableName()) { @@ -389,58 +486,43 @@ public final class MasterAdminProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - tableName_ = input.readBytes(); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.newBuilder(); - if (hasColumnFamilies()) { - subBuilder.mergeFrom(getColumnFamilies()); - } - input.readMessage(subBuilder, extensionRegistry); - setColumnFamilies(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required bytes tableName = 1; private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes tableName = 1; + */ public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes tableName = 1; + */ public com.google.protobuf.ByteString getTableName() { return tableName_; } + /** + * required bytes tableName = 1; + */ public Builder setTableName(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -450,20 +532,29 @@ public final class MasterAdminProtos { onChanged(); return this; } + /** + * required bytes tableName = 1; + */ public Builder clearTableName() { bitField0_ = (bitField0_ & ~0x00000001); 
tableName_ = getDefaultInstance().getTableName(); onChanged(); return this; } - + // required .ColumnFamilySchema columnFamilies = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> columnFamiliesBuilder_; + /** + * required .ColumnFamilySchema columnFamilies = 2; + */ public boolean hasColumnFamilies() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required .ColumnFamilySchema columnFamilies = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies() { if (columnFamiliesBuilder_ == null) { return columnFamilies_; @@ -471,6 +562,9 @@ public final class MasterAdminProtos { return columnFamiliesBuilder_.getMessage(); } } + /** + * required .ColumnFamilySchema columnFamilies = 2; + */ public Builder setColumnFamilies(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value) { if (columnFamiliesBuilder_ == null) { if (value == null) { @@ -484,6 +578,9 @@ public final class MasterAdminProtos { bitField0_ |= 0x00000002; return this; } + /** + * required .ColumnFamilySchema columnFamilies = 2; + */ public Builder setColumnFamilies( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder builderForValue) { if (columnFamiliesBuilder_ == null) { @@ -495,6 +592,9 @@ public final class MasterAdminProtos { bitField0_ |= 0x00000002; return this; } + /** + * required .ColumnFamilySchema columnFamilies = 2; + */ public Builder mergeColumnFamilies(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value) { if (columnFamiliesBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && @@ -511,6 +611,9 @@ public final class MasterAdminProtos { bitField0_ |= 0x00000002; return this; } + /** + * required .ColumnFamilySchema columnFamilies = 2; + */ public Builder clearColumnFamilies() { if (columnFamiliesBuilder_ == null) { columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); @@ -521,11 +624,17 @@ public final class MasterAdminProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } + /** + * required .ColumnFamilySchema columnFamilies = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder getColumnFamiliesBuilder() { bitField0_ |= 0x00000002; onChanged(); return getColumnFamiliesFieldBuilder().getBuilder(); } + /** + * required .ColumnFamilySchema columnFamilies = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder() { if (columnFamiliesBuilder_ != null) { return columnFamiliesBuilder_.getMessageOrBuilder(); @@ -533,6 +642,9 @@ public final class MasterAdminProtos { return columnFamilies_; } } + /** + * required .ColumnFamilySchema columnFamilies = 2; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> 
getColumnFamiliesFieldBuilder() { @@ -546,84 +658,145 @@ public final class MasterAdminProtos { } return columnFamiliesBuilder_; } - + // @@protoc_insertion_point(builder_scope:AddColumnRequest) } - + static { defaultInstance = new AddColumnRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:AddColumnRequest) } - + public interface AddColumnResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code AddColumnResponse} + */ public static final class AddColumnResponse extends com.google.protobuf.GeneratedMessage implements AddColumnResponseOrBuilder { // Use AddColumnResponse.newBuilder() to construct. - private AddColumnResponse(Builder builder) { + private AddColumnResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private AddColumnResponse(boolean noInit) {} - + private AddColumnResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final AddColumnResponse defaultInstance; public static AddColumnResponse getDefaultInstance() { return defaultInstance; } - + public AddColumnResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private AddColumnResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AddColumnResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AddColumnResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AddColumnResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public AddColumnResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new AddColumnResponse(input, 
extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -633,101 +806,95 @@ public final class MasterAdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse) obj; - + boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - 
return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code AddColumnResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponseOrBuilder { @@ -735,18 +902,21 @@ public final class MasterAdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AddColumnResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AddColumnResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AddColumnResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -757,25 +927,25 @@ public final class MasterAdminProtos { private static Builder 
create() { return new Builder(); } - + public Builder clear() { super.clear(); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AddColumnResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse result = buildPartial(); if (!result.isInitialized()) { @@ -783,23 +953,13 @@ public final class MasterAdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse(this); onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse)other); @@ -808,116 +968,202 @@ public final class MasterAdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + 
mergeFrom(parsedMessage); } } + return this; } - - + // @@protoc_insertion_point(builder_scope:AddColumnResponse) } - + static { defaultInstance = new AddColumnResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:AddColumnResponse) } - + public interface DeleteColumnRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required bytes tableName = 1; + /** + * required bytes tableName = 1; + */ boolean hasTableName(); + /** + * required bytes tableName = 1; + */ com.google.protobuf.ByteString getTableName(); - + // required bytes columnName = 2; + /** + * required bytes columnName = 2; + */ boolean hasColumnName(); + /** + * required bytes columnName = 2; + */ com.google.protobuf.ByteString getColumnName(); } + /** + * Protobuf type {@code DeleteColumnRequest} + */ public static final class DeleteColumnRequest extends com.google.protobuf.GeneratedMessage implements DeleteColumnRequestOrBuilder { // Use DeleteColumnRequest.newBuilder() to construct. - private DeleteColumnRequest(Builder builder) { + private DeleteColumnRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private DeleteColumnRequest(boolean noInit) {} - + private DeleteColumnRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final DeleteColumnRequest defaultInstance; public static DeleteColumnRequest getDefaultInstance() { return defaultInstance; } - + public DeleteColumnRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private DeleteColumnRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + tableName_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + columnName_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteColumnRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteColumnRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteColumnRequest_fieldAccessorTable + 
.ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public DeleteColumnRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new DeleteColumnRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required bytes tableName = 1; public static final int TABLENAME_FIELD_NUMBER = 1; private com.google.protobuf.ByteString tableName_; + /** + * required bytes tableName = 1; + */ public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes tableName = 1; + */ public com.google.protobuf.ByteString getTableName() { return tableName_; } - + // required bytes columnName = 2; public static final int COLUMNNAME_FIELD_NUMBER = 2; private com.google.protobuf.ByteString columnName_; + /** + * required bytes columnName = 2; + */ public boolean hasColumnName() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required bytes columnName = 2; + */ public com.google.protobuf.ByteString getColumnName() { return columnName_; } - + private void initFields() { tableName_ = com.google.protobuf.ByteString.EMPTY; columnName_ = com.google.protobuf.ByteString.EMPTY; @@ -926,7 +1172,7 @@ public final class MasterAdminProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasTableName()) { memoizedIsInitialized = 0; return false; @@ -938,7 +1184,7 @@ public final class MasterAdminProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -950,12 +1196,12 @@ public final class MasterAdminProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -969,14 +1215,14 @@ public final class MasterAdminProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -986,7 +1232,7 @@ public final class MasterAdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest) obj; - + boolean result = true; result = result && (hasTableName() == other.hasTableName()); if (hasTableName()) { @@ -1002,9 +1248,13 @@ public final class MasterAdminProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; 
hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasTableName()) { @@ -1016,89 +1266,79 @@ public final class MasterAdminProtos { hash = (53 * hash) + getColumnName().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return 
newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code DeleteColumnRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequestOrBuilder { @@ -1106,18 +1346,21 @@ public final class MasterAdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteColumnRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteColumnRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteColumnRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -1128,7 +1371,7 @@ public final class MasterAdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); tableName_ = com.google.protobuf.ByteString.EMPTY; @@ -1137,20 +1380,20 @@ public final class MasterAdminProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteColumnRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest result = buildPartial(); if (!result.isInitialized()) { @@ -1158,17 +1401,7 @@ public final class MasterAdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest 
result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest(this); int from_bitField0_ = bitField0_; @@ -1185,7 +1418,7 @@ public final class MasterAdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest)other); @@ -1194,7 +1427,7 @@ public final class MasterAdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest.getDefaultInstance()) return this; if (other.hasTableName()) { @@ -1206,7 +1439,7 @@ public final class MasterAdminProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasTableName()) { @@ -1218,54 +1451,43 @@ public final class MasterAdminProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - tableName_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - columnName_ = input.readBytes(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required bytes tableName = 1; private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes tableName = 1; + */ public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes tableName = 1; + */ public com.google.protobuf.ByteString getTableName() { return tableName_; } + /** + * required bytes tableName = 1; + */ public Builder setTableName(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -1275,21 +1497,33 @@ public final class MasterAdminProtos { onChanged(); return this; } + /** + * required bytes tableName = 1; + */ public Builder clearTableName() { bitField0_ = (bitField0_ & 
~0x00000001); tableName_ = getDefaultInstance().getTableName(); onChanged(); return this; } - + // required bytes columnName = 2; private com.google.protobuf.ByteString columnName_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes columnName = 2; + */ public boolean hasColumnName() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required bytes columnName = 2; + */ public com.google.protobuf.ByteString getColumnName() { return columnName_; } + /** + * required bytes columnName = 2; + */ public Builder setColumnName(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -1299,90 +1533,154 @@ public final class MasterAdminProtos { onChanged(); return this; } + /** + * required bytes columnName = 2; + */ public Builder clearColumnName() { bitField0_ = (bitField0_ & ~0x00000002); columnName_ = getDefaultInstance().getColumnName(); onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:DeleteColumnRequest) } - + static { defaultInstance = new DeleteColumnRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:DeleteColumnRequest) } - + public interface DeleteColumnResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code DeleteColumnResponse} + */ public static final class DeleteColumnResponse extends com.google.protobuf.GeneratedMessage implements DeleteColumnResponseOrBuilder { // Use DeleteColumnResponse.newBuilder() to construct. - private DeleteColumnResponse(Builder builder) { + private DeleteColumnResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private DeleteColumnResponse(boolean noInit) {} - + private DeleteColumnResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final DeleteColumnResponse defaultInstance; public static DeleteColumnResponse getDefaultInstance() { return defaultInstance; } - + public DeleteColumnResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private DeleteColumnResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteColumnResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable 
internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteColumnResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteColumnResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public DeleteColumnResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new DeleteColumnResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -1392,101 +1690,95 @@ public final class MasterAdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse) obj; - + boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return 
newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code DeleteColumnResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponseOrBuilder { @@ -1494,18 +1786,21 @@ public final class MasterAdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteColumnResponse_descriptor; } - + protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteColumnResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteColumnResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -1516,25 +1811,25 @@ public final class MasterAdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteColumnResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse result = buildPartial(); if (!result.isInitialized()) { @@ -1542,23 +1837,13 @@ public final class MasterAdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse(this); onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse)other); @@ -1567,120 +1852,220 @@ public final class MasterAdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - - + // @@protoc_insertion_point(builder_scope:DeleteColumnResponse) } - + static { defaultInstance = new DeleteColumnResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:DeleteColumnResponse) } - + public interface ModifyColumnRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required bytes tableName = 1; + /** + * required bytes tableName = 1; + */ boolean hasTableName(); + /** + * required bytes tableName = 1; + */ com.google.protobuf.ByteString getTableName(); - + // required .ColumnFamilySchema columnFamilies = 2; + /** + * required .ColumnFamilySchema columnFamilies = 2; + */ boolean hasColumnFamilies(); + /** + * required .ColumnFamilySchema columnFamilies = 2; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies(); + /** + * required .ColumnFamilySchema columnFamilies = 2; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder(); } + /** + * Protobuf type {@code ModifyColumnRequest} + */ public static final class ModifyColumnRequest extends com.google.protobuf.GeneratedMessage implements ModifyColumnRequestOrBuilder { // Use ModifyColumnRequest.newBuilder() to construct. 
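// [Editor's aside — illustrative sketch, not part of the generated diff; assumes
// protobuf-java 2.5.x on the classpath.] The recurring rewrite above replaces the
// 2.4-era newBuilder().mergeFrom(...).buildParsed() round trip with a single call
// on the new static PARSER; every parseFrom/parseDelimitedFrom overload now just
// delegates to it. Caller-visible behavior is unchanged, e.g. for the empty
// DeleteColumnResponse message:
//
//   static DeleteColumnResponse reparse(byte[] bytes)
//       throws com.google.protobuf.InvalidProtocolBufferException {
//     // Old path: a throwaway Builder per parse (buildParsed() was a private
//     // helper removed by this patch; DeleteColumnResponse has no required
//     // fields, so build() cannot fail here).
//     DeleteColumnResponse viaBuilder =
//         DeleteColumnResponse.newBuilder().mergeFrom(bytes).build();
//     // New path: the singleton parser introduced by this patch.
//     DeleteColumnResponse viaParser = DeleteColumnResponse.PARSER.parseFrom(bytes);
//     assert viaBuilder.equals(viaParser);
//     return viaParser;
//   }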
- private ModifyColumnRequest(Builder builder) { + private ModifyColumnRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private ModifyColumnRequest(boolean noInit) {} - + private ModifyColumnRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final ModifyColumnRequest defaultInstance; public static ModifyColumnRequest getDefaultInstance() { return defaultInstance; } - + public ModifyColumnRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ModifyColumnRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + tableName_ = input.readBytes(); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder subBuilder = null; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + subBuilder = columnFamilies_.toBuilder(); + } + columnFamilies_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(columnFamilies_); + columnFamilies_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000002; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyColumnRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyColumnRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyColumnRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public ModifyColumnRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ModifyColumnRequest(input, extensionRegistry); + } + }; 
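// [Editor's aside — not generated code.] The case labels in the parsing
// constructor above are precomputed protobuf wire tags:
// tag = (fieldNumber << 3) | wireType. Both fields of ModifyColumnRequest are
// length-delimited (wire type 2), so field 1 (tableName) yields
// (1 << 3) | 2 = 10 and field 2 (columnFamilies) yields (2 << 3) | 2 = 18,
// which is why the loop dispatches on "case 10" and "case 18". A self-contained
// check using the public WireFormat helpers:
//
//   int tableNameTag =
//       (1 << 3) | com.google.protobuf.WireFormat.WIRETYPE_LENGTH_DELIMITED;  // 10
//   int columnFamiliesTag =
//       (2 << 3) | com.google.protobuf.WireFormat.WIRETYPE_LENGTH_DELIMITED;  // 18
//   assert com.google.protobuf.WireFormat.getTagFieldNumber(columnFamiliesTag) == 2;
//   assert com.google.protobuf.WireFormat.getTagWireType(columnFamiliesTag)
//       == com.google.protobuf.WireFormat.WIRETYPE_LENGTH_DELIMITED;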
+ + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required bytes tableName = 1; public static final int TABLENAME_FIELD_NUMBER = 1; private com.google.protobuf.ByteString tableName_; + /** + * required bytes tableName = 1; + */ public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes tableName = 1; + */ public com.google.protobuf.ByteString getTableName() { return tableName_; } - + // required .ColumnFamilySchema columnFamilies = 2; public static final int COLUMNFAMILIES_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema columnFamilies_; + /** + * required .ColumnFamilySchema columnFamilies = 2; + */ public boolean hasColumnFamilies() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required .ColumnFamilySchema columnFamilies = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies() { return columnFamilies_; } + /** + * required .ColumnFamilySchema columnFamilies = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder() { return columnFamilies_; } - + private void initFields() { tableName_ = com.google.protobuf.ByteString.EMPTY; columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); @@ -1689,7 +2074,7 @@ public final class MasterAdminProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasTableName()) { memoizedIsInitialized = 0; return false; @@ -1705,7 +2090,7 @@ public final class MasterAdminProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -1717,12 +2102,12 @@ public final class MasterAdminProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -1736,14 +2121,14 @@ public final class MasterAdminProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -1753,7 +2138,7 @@ public final class MasterAdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest) obj; - + boolean result = true; result = result && (hasTableName() == other.hasTableName()); if (hasTableName()) { @@ -1769,9 +2154,13 @@ public final class MasterAdminProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasTableName()) { @@ -1783,89 +2172,79 @@ public final class MasterAdminProtos { hash = (53 * hash) + 
getColumnFamilies().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return 
Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code ModifyColumnRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequestOrBuilder { @@ -1873,18 +2252,21 @@ public final class MasterAdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyColumnRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyColumnRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyColumnRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -1896,7 +2278,7 @@ public final class MasterAdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); tableName_ = com.google.protobuf.ByteString.EMPTY; @@ -1909,20 +2291,20 @@ public final class MasterAdminProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyColumnRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest result = buildPartial(); if (!result.isInitialized()) { @@ -1930,17 +2312,7 @@ public final class MasterAdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - 
} - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest(this); int from_bitField0_ = bitField0_; @@ -1961,7 +2333,7 @@ public final class MasterAdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest)other); @@ -1970,7 +2342,7 @@ public final class MasterAdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest.getDefaultInstance()) return this; if (other.hasTableName()) { @@ -1982,7 +2354,7 @@ public final class MasterAdminProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasTableName()) { @@ -1998,58 +2370,43 @@ public final class MasterAdminProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - tableName_ = input.readBytes(); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.newBuilder(); - if (hasColumnFamilies()) { - subBuilder.mergeFrom(getColumnFamilies()); - } - input.readMessage(subBuilder, extensionRegistry); - setColumnFamilies(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required bytes tableName = 1; private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes tableName = 1; + */ public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes tableName = 1; + */ public com.google.protobuf.ByteString getTableName() { return tableName_; } + /** + * required bytes tableName = 1; + */ public Builder setTableName(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -2059,20 +2416,29 @@ public final class MasterAdminProtos { 
onChanged(); return this; } + /** + * required bytes tableName = 1; + */ public Builder clearTableName() { bitField0_ = (bitField0_ & ~0x00000001); tableName_ = getDefaultInstance().getTableName(); onChanged(); return this; } - + // required .ColumnFamilySchema columnFamilies = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> columnFamiliesBuilder_; + /** + * required .ColumnFamilySchema columnFamilies = 2; + */ public boolean hasColumnFamilies() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required .ColumnFamilySchema columnFamilies = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies() { if (columnFamiliesBuilder_ == null) { return columnFamilies_; @@ -2080,6 +2446,9 @@ public final class MasterAdminProtos { return columnFamiliesBuilder_.getMessage(); } } + /** + * required .ColumnFamilySchema columnFamilies = 2; + */ public Builder setColumnFamilies(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value) { if (columnFamiliesBuilder_ == null) { if (value == null) { @@ -2093,6 +2462,9 @@ public final class MasterAdminProtos { bitField0_ |= 0x00000002; return this; } + /** + * required .ColumnFamilySchema columnFamilies = 2; + */ public Builder setColumnFamilies( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder builderForValue) { if (columnFamiliesBuilder_ == null) { @@ -2104,6 +2476,9 @@ public final class MasterAdminProtos { bitField0_ |= 0x00000002; return this; } + /** + * required .ColumnFamilySchema columnFamilies = 2; + */ public Builder mergeColumnFamilies(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value) { if (columnFamiliesBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && @@ -2120,6 +2495,9 @@ public final class MasterAdminProtos { bitField0_ |= 0x00000002; return this; } + /** + * required .ColumnFamilySchema columnFamilies = 2; + */ public Builder clearColumnFamilies() { if (columnFamiliesBuilder_ == null) { columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); @@ -2130,11 +2508,17 @@ public final class MasterAdminProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } + /** + * required .ColumnFamilySchema columnFamilies = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder getColumnFamiliesBuilder() { bitField0_ |= 0x00000002; onChanged(); return getColumnFamiliesFieldBuilder().getBuilder(); } + /** + * required .ColumnFamilySchema columnFamilies = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder() { if (columnFamiliesBuilder_ != null) { return columnFamiliesBuilder_.getMessageOrBuilder(); @@ -2142,6 +2526,9 @@ public final class MasterAdminProtos { return columnFamilies_; } } + /** + * required .ColumnFamilySchema columnFamilies = 2; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> getColumnFamiliesFieldBuilder() { @@ -2155,84 +2542,145 @@ public final class MasterAdminProtos { } return columnFamiliesBuilder_; } - + // @@protoc_insertion_point(builder_scope:ModifyColumnRequest) } - + static { defaultInstance = new ModifyColumnRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:ModifyColumnRequest) } - + public interface ModifyColumnResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code ModifyColumnResponse} + */ public static final class ModifyColumnResponse extends com.google.protobuf.GeneratedMessage implements ModifyColumnResponseOrBuilder { // Use ModifyColumnResponse.newBuilder() to construct. - private ModifyColumnResponse(Builder builder) { + private ModifyColumnResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private ModifyColumnResponse(boolean noInit) {} - + private ModifyColumnResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final ModifyColumnResponse defaultInstance; public static ModifyColumnResponse getDefaultInstance() { return defaultInstance; } - + public ModifyColumnResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ModifyColumnResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyColumnResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyColumnResponse_fieldAccessorTable; - } - - private void initFields() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyColumnResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse.Builder.class); } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte 
isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public ModifyColumnResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ModifyColumnResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -2242,101 +2690,95 @@ public final class MasterAdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse) obj; - + boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, 
extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code ModifyColumnResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponseOrBuilder { @@ -2344,18 +2786,21 @@ public final class MasterAdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyColumnResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyColumnResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyColumnResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse.class, 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -2366,25 +2811,25 @@ public final class MasterAdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyColumnResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse result = buildPartial(); if (!result.isInitialized()) { @@ -2392,23 +2837,13 @@ public final class MasterAdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse(this); onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse)other); @@ -2417,124 +2852,238 @@ public final class MasterAdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if 
(!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - - + // @@protoc_insertion_point(builder_scope:ModifyColumnResponse) } - + static { defaultInstance = new ModifyColumnResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:ModifyColumnResponse) } - + public interface MoveRegionRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .RegionSpecifier region = 1; + /** + * required .RegionSpecifier region = 1; + */ boolean hasRegion(); + /** + * required .RegionSpecifier region = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + /** + * required .RegionSpecifier region = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - + // optional .ServerName destServerName = 2; + /** + * optional .ServerName destServerName = 2; + */ boolean hasDestServerName(); + /** + * optional .ServerName destServerName = 2; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDestServerName(); + /** + * optional .ServerName destServerName = 2; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDestServerNameOrBuilder(); } + /** + * Protobuf type {@code MoveRegionRequest} + */ public static final class MoveRegionRequest extends com.google.protobuf.GeneratedMessage implements MoveRegionRequestOrBuilder { // Use MoveRegionRequest.newBuilder() to construct. 
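// [Editor's aside — behavioral sketch, not part of the patch; assumes
// protobuf-java 2.5.x.] The rewritten Builder#mergeFrom above (repeated for
// each message in this file) no longer hand-rolls a tag-reading loop: it
// delegates to PARSER.parsePartialFrom and, when parsing fails midway, the
// finally block salvages whatever was decoded via
// InvalidProtocolBufferException#getUnfinishedMessage() before rethrowing.
// From the caller's side:
//
//   ModifyColumnResponse.Builder b = ModifyColumnResponse.newBuilder();
//   try {
//     b.mergeFrom(input, extensionRegistry);   // input: CodedInputStream
//   } catch (com.google.protobuf.InvalidProtocolBufferException e) {
//     // b already holds every field decoded before the failure, exactly as
//     // the old incremental loop did; the exception still propagates.
//   }
//
// The other recurring addition, memoizedHashCode, is a benign-race cache:
// messages are immutable once built, int writes are atomic, and a hash that
// happens to equal the sentinel 0 is simply recomputed on each call.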
- private MoveRegionRequest(Builder builder) { + private MoveRegionRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private MoveRegionRequest(boolean noInit) {} - + private MoveRegionRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final MoveRegionRequest defaultInstance; public static MoveRegionRequest getDefaultInstance() { return defaultInstance; } - + public MoveRegionRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private MoveRegionRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = region_.toBuilder(); + } + region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(region_); + region_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = null; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + subBuilder = destServerName_.toBuilder(); + } + destServerName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(destServerName_); + destServerName_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000002; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_MoveRegionRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_MoveRegionRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_MoveRegionRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest.Builder.class); + } + + public static 
com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public MoveRegionRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new MoveRegionRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required .RegionSpecifier region = 1; public static final int REGION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + /** + * required .RegionSpecifier region = 1; + */ public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { return region_; } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { return region_; } - + // optional .ServerName destServerName = 2; public static final int DESTSERVERNAME_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName destServerName_; + /** + * optional .ServerName destServerName = 2; + */ public boolean hasDestServerName() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional .ServerName destServerName = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDestServerName() { return destServerName_; } + /** + * optional .ServerName destServerName = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDestServerNameOrBuilder() { return destServerName_; } - + private void initFields() { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); destServerName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); @@ -2543,7 +3092,7 @@ public final class MasterAdminProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasRegion()) { memoizedIsInitialized = 0; return false; @@ -2561,7 +3110,7 @@ public final class MasterAdminProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -2573,12 +3122,12 @@ public final class MasterAdminProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -2592,14 +3141,14 @@ public final class MasterAdminProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -2609,7 +3158,7 @@ public final class MasterAdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest other = 
(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest) obj; - + boolean result = true; result = result && (hasRegion() == other.hasRegion()); if (hasRegion()) { @@ -2625,9 +3174,13 @@ public final class MasterAdminProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasRegion()) { @@ -2639,89 +3192,79 @@ public final class MasterAdminProtos { hash = (53 * hash) + getDestServerName().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code MoveRegionRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequestOrBuilder { @@ -2729,18 +3272,21 @@ public final class MasterAdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_MoveRegionRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_MoveRegionRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_MoveRegionRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -2753,7 +3299,7 @@ public final class MasterAdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (regionBuilder_ == null) { @@ -2770,20 +3316,20 @@ public final class MasterAdminProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_MoveRegionRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest build() { 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest result = buildPartial(); if (!result.isInitialized()) { @@ -2791,17 +3337,7 @@ public final class MasterAdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest(this); int from_bitField0_ = bitField0_; @@ -2826,7 +3362,7 @@ public final class MasterAdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest)other); @@ -2835,7 +3371,7 @@ public final class MasterAdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest.getDefaultInstance()) return this; if (other.hasRegion()) { @@ -2847,7 +3383,7 @@ public final class MasterAdminProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasRegion()) { @@ -2865,61 +3401,39 @@ public final class MasterAdminProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegion(subBuilder.buildPartial()); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(); - if (hasDestServerName()) { - subBuilder.mergeFrom(getDestServerName()); - } - input.readMessage(subBuilder, extensionRegistry); - setDestServerName(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + 
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .RegionSpecifier region = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + /** + * required .RegionSpecifier region = 1; + */ public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { return region_; @@ -2927,6 +3441,9 @@ public final class MasterAdminProtos { return regionBuilder_.getMessage(); } } + /** + * required .RegionSpecifier region = 1; + */ public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (value == null) { @@ -2940,6 +3457,9 @@ public final class MasterAdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier region = 1; + */ public Builder setRegion( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { if (regionBuilder_ == null) { @@ -2951,6 +3471,9 @@ public final class MasterAdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier region = 1; + */ public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -2967,6 +3490,9 @@ public final class MasterAdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier region = 1; + */ public Builder clearRegion() { if (regionBuilder_ == null) { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); @@ -2977,11 +3503,17 @@ public final class MasterAdminProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { bitField0_ |= 0x00000001; onChanged(); return getRegionFieldBuilder().getBuilder(); } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); @@ -2989,6 +3521,9 @@ public final class MasterAdminProtos { return region_; } } + /** + * required .RegionSpecifier region = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { @@ -3002,14 +3537,20 @@ public final class MasterAdminProtos { } return regionBuilder_; } - + // optional .ServerName 
destServerName = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName destServerName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> destServerNameBuilder_; + /** + * optional .ServerName destServerName = 2; + */ public boolean hasDestServerName() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional .ServerName destServerName = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDestServerName() { if (destServerNameBuilder_ == null) { return destServerName_; @@ -3017,6 +3558,9 @@ public final class MasterAdminProtos { return destServerNameBuilder_.getMessage(); } } + /** + * optional .ServerName destServerName = 2; + */ public Builder setDestServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { if (destServerNameBuilder_ == null) { if (value == null) { @@ -3030,6 +3574,9 @@ public final class MasterAdminProtos { bitField0_ |= 0x00000002; return this; } + /** + * optional .ServerName destServerName = 2; + */ public Builder setDestServerName( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { if (destServerNameBuilder_ == null) { @@ -3041,6 +3588,9 @@ public final class MasterAdminProtos { bitField0_ |= 0x00000002; return this; } + /** + * optional .ServerName destServerName = 2; + */ public Builder mergeDestServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { if (destServerNameBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && @@ -3057,6 +3607,9 @@ public final class MasterAdminProtos { bitField0_ |= 0x00000002; return this; } + /** + * optional .ServerName destServerName = 2; + */ public Builder clearDestServerName() { if (destServerNameBuilder_ == null) { destServerName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); @@ -3067,11 +3620,17 @@ public final class MasterAdminProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } + /** + * optional .ServerName destServerName = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getDestServerNameBuilder() { bitField0_ |= 0x00000002; onChanged(); return getDestServerNameFieldBuilder().getBuilder(); } + /** + * optional .ServerName destServerName = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDestServerNameOrBuilder() { if (destServerNameBuilder_ != null) { return destServerNameBuilder_.getMessageOrBuilder(); @@ -3079,6 +3638,9 @@ public final class MasterAdminProtos { return destServerName_; } } + /** + * optional .ServerName destServerName = 2; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getDestServerNameFieldBuilder() { @@ -3092,84 +3654,145 @@ public final class MasterAdminProtos { } return destServerNameBuilder_; } - + // @@protoc_insertion_point(builder_scope:MoveRegionRequest) } - + static { defaultInstance = new MoveRegionRequest(true); defaultInstance.initFields(); } - + // 
@@protoc_insertion_point(class_scope:MoveRegionRequest) } - + public interface MoveRegionResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code MoveRegionResponse} + */ public static final class MoveRegionResponse extends com.google.protobuf.GeneratedMessage implements MoveRegionResponseOrBuilder { // Use MoveRegionResponse.newBuilder() to construct. - private MoveRegionResponse(Builder builder) { + private MoveRegionResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private MoveRegionResponse(boolean noInit) {} - + private MoveRegionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final MoveRegionResponse defaultInstance; public static MoveRegionResponse getDefaultInstance() { return defaultInstance; } - + public MoveRegionResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private MoveRegionResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_MoveRegionResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_MoveRegionResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_MoveRegionResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public MoveRegionResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new MoveRegionResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = 
memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -3179,101 +3802,95 @@ public final class MasterAdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse) obj; - + boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse 
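Another change repeated for every message in this file: hashCode() results are now memoized. The cache is safe because generated messages are immutable, and the unsynchronized int write is a benign race (every thread computes the identical value); 0 doubles as the "not yet computed" sentinel, so a hash that genuinely equals 0 is simply recomputed on each call. A generic sketch of the idiom, with hypothetical fields standing in for the generated ones:

// Memoized hashCode for an immutable value class, mirroring the pattern
// protoc 2.5 emits (the 41/19/53 mixing constants appear in the hunks above).
final class MemoizedHashSketch {
  private final String name;
  private final long value;
  private int memoizedHashCode = 0;  // 0 == not computed yet

  MemoizedHashSketch(String name, long value) {
    this.name = name;
    this.value = value;
  }

  @Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;  // fast path after the first call
    }
    int hash = 41;
    hash = (19 * hash) + name.hashCode();
    hash = (53 * hash) + Long.valueOf(value).hashCode();
    memoizedHashCode = hash;
    return hash;
  }
}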
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code MoveRegionResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponseOrBuilder { @@ -3281,18 +3898,21 @@ public final class MasterAdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_MoveRegionResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_MoveRegionResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_MoveRegionResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -3303,25 +3923,25 @@ public final class MasterAdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public 
com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_MoveRegionResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse result = buildPartial(); if (!result.isInitialized()) { @@ -3329,23 +3949,13 @@ public final class MasterAdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse(this); onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse)other); @@ -3354,138 +3964,274 @@ public final class MasterAdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - - + // @@protoc_insertion_point(builder_scope:MoveRegionResponse) } - + static { defaultInstance = new 
MoveRegionResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:MoveRegionResponse) } - + public interface DispatchMergingRegionsRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .RegionSpecifier regionA = 1; + /** + * required .RegionSpecifier regionA = 1; + */ boolean hasRegionA(); + /** + * required .RegionSpecifier regionA = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegionA(); + /** + * required .RegionSpecifier regionA = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionAOrBuilder(); - + // required .RegionSpecifier regionB = 2; + /** + * required .RegionSpecifier regionB = 2; + */ boolean hasRegionB(); + /** + * required .RegionSpecifier regionB = 2; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegionB(); + /** + * required .RegionSpecifier regionB = 2; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionBOrBuilder(); - + // optional bool forcible = 3 [default = false]; + /** + * optional bool forcible = 3 [default = false]; + */ boolean hasForcible(); + /** + * optional bool forcible = 3 [default = false]; + */ boolean getForcible(); } + /** + * Protobuf type {@code DispatchMergingRegionsRequest} + * + *
+   * <pre>
+   **
+   * Dispatch merging the specified regions.
+   * </pre>
+ */ public static final class DispatchMergingRegionsRequest extends com.google.protobuf.GeneratedMessage implements DispatchMergingRegionsRequestOrBuilder { // Use DispatchMergingRegionsRequest.newBuilder() to construct. - private DispatchMergingRegionsRequest(Builder builder) { + private DispatchMergingRegionsRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private DispatchMergingRegionsRequest(boolean noInit) {} - + private DispatchMergingRegionsRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final DispatchMergingRegionsRequest defaultInstance; public static DispatchMergingRegionsRequest getDefaultInstance() { return defaultInstance; } - + public DispatchMergingRegionsRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private DispatchMergingRegionsRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = regionA_.toBuilder(); + } + regionA_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(regionA_); + regionA_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + subBuilder = regionB_.toBuilder(); + } + regionB_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(regionB_); + regionB_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000002; + break; + } + case 24: { + bitField0_ |= 0x00000004; + forcible_ = input.readBool(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DispatchMergingRegionsRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DispatchMergingRegionsRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DispatchMergingRegionsRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public DispatchMergingRegionsRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new DispatchMergingRegionsRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required .RegionSpecifier regionA = 1; public static final int REGIONA_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier regionA_; + /** + * required .RegionSpecifier regionA = 1; + */ public boolean hasRegionA() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .RegionSpecifier regionA = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegionA() { return regionA_; } + /** + * required .RegionSpecifier regionA = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionAOrBuilder() { return regionA_; } - + // required .RegionSpecifier regionB = 2; public static final int REGIONB_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier regionB_; + /** + * required .RegionSpecifier regionB = 2; + */ public boolean hasRegionB() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required .RegionSpecifier regionB = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegionB() { return regionB_; } + /** + * required .RegionSpecifier regionB = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionBOrBuilder() { return regionB_; } - + // optional bool forcible = 3 [default = false]; public static final int FORCIBLE_FIELD_NUMBER = 3; private boolean forcible_; + /** + * optional bool forcible = 3 [default = false]; + */ public boolean hasForcible() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional bool forcible = 3 [default = false]; + */ public boolean getForcible() { return forcible_; } - + private void initFields() { regionA_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); regionB_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); @@ -3495,7 +4241,7 @@ public final class MasterAdminProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasRegionA()) { memoizedIsInitialized = 0; return false; @@ -3515,7 +4261,7 @@ public final class MasterAdminProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -3530,12 +4276,12 @@ public final class MasterAdminProtos 
{ } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -3553,14 +4299,14 @@ public final class MasterAdminProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -3570,7 +4316,7 @@ public final class MasterAdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsRequest) obj; - + boolean result = true; result = result && (hasRegionA() == other.hasRegionA()); if (hasRegionA()) { @@ -3591,9 +4337,13 @@ public final class MasterAdminProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasRegionA()) { @@ -3609,89 +4359,84 @@ public final class MasterAdminProtos { hash = (53 * hash) + hashBoolean(getForcible()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, 
extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code DispatchMergingRegionsRequest} + * + *
+     * <pre>
+     **
+     * Dispatch merging the specified regions.
+     * </pre>
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsRequestOrBuilder { @@ -3699,18 +4444,21 @@ public final class MasterAdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DispatchMergingRegionsRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DispatchMergingRegionsRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DispatchMergingRegionsRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -3723,7 +4471,7 @@ public final class MasterAdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (regionABuilder_ == null) { @@ -3742,20 +4490,20 @@ public final class MasterAdminProtos { bitField0_ = (bitField0_ & ~0x00000004); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DispatchMergingRegionsRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsRequest result = buildPartial(); if (!result.isInitialized()) { @@ -3763,17 +4511,7 @@ public final class MasterAdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsRequest(this); int from_bitField0_ = bitField0_; @@ -3802,7 +4540,7 @@ public final class 
MasterAdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsRequest)other); @@ -3811,7 +4549,7 @@ public final class MasterAdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsRequest.getDefaultInstance()) return this; if (other.hasRegionA()) { @@ -3826,7 +4564,7 @@ public final class MasterAdminProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasRegionA()) { @@ -3846,66 +4584,39 @@ public final class MasterAdminProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegionA()) { - subBuilder.mergeFrom(getRegionA()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegionA(subBuilder.buildPartial()); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegionB()) { - subBuilder.mergeFrom(getRegionB()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegionB(subBuilder.buildPartial()); - break; - } - case 24: { - bitField0_ |= 0x00000004; - forcible_ = input.readBool(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .RegionSpecifier regionA = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier regionA_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionABuilder_; + /** + * required .RegionSpecifier regionA = 1; + */ public 
boolean hasRegionA() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .RegionSpecifier regionA = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegionA() { if (regionABuilder_ == null) { return regionA_; @@ -3913,6 +4624,9 @@ public final class MasterAdminProtos { return regionABuilder_.getMessage(); } } + /** + * required .RegionSpecifier regionA = 1; + */ public Builder setRegionA(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionABuilder_ == null) { if (value == null) { @@ -3926,6 +4640,9 @@ public final class MasterAdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier regionA = 1; + */ public Builder setRegionA( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { if (regionABuilder_ == null) { @@ -3937,6 +4654,9 @@ public final class MasterAdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier regionA = 1; + */ public Builder mergeRegionA(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionABuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -3953,6 +4673,9 @@ public final class MasterAdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier regionA = 1; + */ public Builder clearRegionA() { if (regionABuilder_ == null) { regionA_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); @@ -3963,11 +4686,17 @@ public final class MasterAdminProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * required .RegionSpecifier regionA = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionABuilder() { bitField0_ |= 0x00000001; onChanged(); return getRegionAFieldBuilder().getBuilder(); } + /** + * required .RegionSpecifier regionA = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionAOrBuilder() { if (regionABuilder_ != null) { return regionABuilder_.getMessageOrBuilder(); @@ -3975,6 +4704,9 @@ public final class MasterAdminProtos { return regionA_; } } + /** + * required .RegionSpecifier regionA = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionAFieldBuilder() { @@ -3988,14 +4720,20 @@ public final class MasterAdminProtos { } return regionABuilder_; } - + // required .RegionSpecifier regionB = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier regionB_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBBuilder_; + /** + * required .RegionSpecifier regionB = 2; + */ public boolean hasRegionB() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required .RegionSpecifier regionB = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegionB() { if (regionBBuilder_ == null) { 
return regionB_; @@ -4003,6 +4741,9 @@ public final class MasterAdminProtos { return regionBBuilder_.getMessage(); } } + /** + * required .RegionSpecifier regionB = 2; + */ public Builder setRegionB(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBBuilder_ == null) { if (value == null) { @@ -4016,6 +4757,9 @@ public final class MasterAdminProtos { bitField0_ |= 0x00000002; return this; } + /** + * required .RegionSpecifier regionB = 2; + */ public Builder setRegionB( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { if (regionBBuilder_ == null) { @@ -4027,6 +4771,9 @@ public final class MasterAdminProtos { bitField0_ |= 0x00000002; return this; } + /** + * required .RegionSpecifier regionB = 2; + */ public Builder mergeRegionB(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && @@ -4043,6 +4790,9 @@ public final class MasterAdminProtos { bitField0_ |= 0x00000002; return this; } + /** + * required .RegionSpecifier regionB = 2; + */ public Builder clearRegionB() { if (regionBBuilder_ == null) { regionB_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); @@ -4053,11 +4803,17 @@ public final class MasterAdminProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } + /** + * required .RegionSpecifier regionB = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBBuilder() { bitField0_ |= 0x00000002; onChanged(); return getRegionBFieldBuilder().getBuilder(); } + /** + * required .RegionSpecifier regionB = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionBOrBuilder() { if (regionBBuilder_ != null) { return regionBBuilder_.getMessageOrBuilder(); @@ -4065,6 +4821,9 @@ public final class MasterAdminProtos { return regionB_; } } + /** + * required .RegionSpecifier regionB = 2; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionBFieldBuilder() { @@ -4078,105 +4837,178 @@ public final class MasterAdminProtos { } return regionBBuilder_; } - + // optional bool forcible = 3 [default = false]; private boolean forcible_ ; + /** + * optional bool forcible = 3 [default = false]; + */ public boolean hasForcible() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional bool forcible = 3 [default = false]; + */ public boolean getForcible() { return forcible_; } + /** + * optional bool forcible = 3 [default = false]; + */ public Builder setForcible(boolean value) { bitField0_ |= 0x00000004; forcible_ = value; onChanged(); return this; } + /** + * optional bool forcible = 3 [default = false]; + */ public Builder clearForcible() { bitField0_ = (bitField0_ & ~0x00000004); forcible_ = false; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:DispatchMergingRegionsRequest) } - + static { defaultInstance = new DispatchMergingRegionsRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:DispatchMergingRegionsRequest) } - + public interface DispatchMergingRegionsResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code 
DispatchMergingRegionsResponse} + */ public static final class DispatchMergingRegionsResponse extends com.google.protobuf.GeneratedMessage implements DispatchMergingRegionsResponseOrBuilder { // Use DispatchMergingRegionsResponse.newBuilder() to construct. - private DispatchMergingRegionsResponse(Builder builder) { + private DispatchMergingRegionsResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private DispatchMergingRegionsResponse(boolean noInit) {} - + private DispatchMergingRegionsResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final DispatchMergingRegionsResponse defaultInstance; public static DispatchMergingRegionsResponse getDefaultInstance() { return defaultInstance; } - + public DispatchMergingRegionsResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private DispatchMergingRegionsResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DispatchMergingRegionsResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DispatchMergingRegionsResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DispatchMergingRegionsResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public DispatchMergingRegionsResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new DispatchMergingRegionsResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() 
{ byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -4186,101 +5018,95 @@ public final class MasterAdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsResponse) obj; - + boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public 
static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code DispatchMergingRegionsResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsResponseOrBuilder { @@ -4288,18 +5114,21 @@ public final class MasterAdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DispatchMergingRegionsResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DispatchMergingRegionsResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DispatchMergingRegionsResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -4310,25 +5139,25 @@ public final class 
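
The rewritten parseDelimitedFrom above keeps its old contract: Parser.parseDelimitedFrom returns null on clean end-of-stream, just as the removed mergeDelimitedFrom branch did. A sketch of the varint-length-prefixed framing it reads, again borrowing FileDescriptorProto from protobuf-java rather than an HBase type:

    import com.google.protobuf.DescriptorProtos.FileDescriptorProto;
    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.io.InputStream;

    public class DelimitedStream {
      public static void main(String[] args) throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        // writeDelimitedTo prefixes each message with its varint length.
        FileDescriptorProto.newBuilder().setName("a.proto").build().writeDelimitedTo(out);
        FileDescriptorProto.newBuilder().setName("b.proto").build().writeDelimitedTo(out);

        InputStream in = new ByteArrayInputStream(out.toByteArray());
        FileDescriptorProto msg;
        // null signals that the stream ended cleanly between messages.
        while ((msg = FileDescriptorProto.PARSER.parseDelimitedFrom(in)) != null) {
          System.out.println(msg.getName());
        }
      }
    }
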
MasterAdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DispatchMergingRegionsResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsResponse result = buildPartial(); if (!result.isInitialized()) { @@ -4336,23 +5165,13 @@ public final class MasterAdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsResponse(this); onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsResponse)other); @@ -4361,106 +5180,189 @@ public final class MasterAdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - - + // @@protoc_insertion_point(builder_scope:DispatchMergingRegionsResponse) } - + static { defaultInstance = new DispatchMergingRegionsResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:DispatchMergingRegionsResponse) } - + public interface AssignRegionRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .RegionSpecifier region = 1; + /** + * required .RegionSpecifier region = 1; + */ boolean hasRegion(); + /** + * required .RegionSpecifier region = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + /** + * required .RegionSpecifier region = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); } + /** + * Protobuf type {@code AssignRegionRequest} + */ public static final class AssignRegionRequest extends com.google.protobuf.GeneratedMessage implements AssignRegionRequestOrBuilder { // Use AssignRegionRequest.newBuilder() to construct. - private AssignRegionRequest(Builder builder) { + private AssignRegionRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private AssignRegionRequest(boolean noInit) {} - + private AssignRegionRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final AssignRegionRequest defaultInstance; public static AssignRegionRequest getDefaultInstance() { return defaultInstance; } - + public AssignRegionRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private AssignRegionRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = region_.toBuilder(); + } + region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(region_); + region_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + 
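
The new Builder.mergeFrom shown above re-merges in a finally block because protobuf 2.5 attaches whatever was decoded before a failure to the exception via setUnfinishedMessage. Callers can use the same hook; a sketch in which the truncation is deliberate, purely for demonstration:

    import com.google.protobuf.DescriptorProtos.FileDescriptorProto;
    import com.google.protobuf.InvalidProtocolBufferException;
    import com.google.protobuf.MessageLite;
    import java.util.Arrays;

    public class UnfinishedMessage {
      public static void main(String[] args) {
        byte[] wire = FileDescriptorProto.newBuilder()
            .setName("demo.proto").setPackage("demo").build().toByteArray();
        byte[] truncated = Arrays.copyOf(wire, wire.length - 1); // cut the tail
        try {
          FileDescriptorProto.PARSER.parseFrom(truncated);
        } catch (InvalidProtocolBufferException e) {
          // Fields decoded before the failure are still available.
          MessageLite partial = e.getUnfinishedMessage();
          System.out.println(partial != null); // true
        }
      }
    }
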
makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AssignRegionRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AssignRegionRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AssignRegionRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public AssignRegionRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new AssignRegionRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required .RegionSpecifier region = 1; public static final int REGION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + /** + * required .RegionSpecifier region = 1; + */ public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { return region_; } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { return region_; } - + private void initFields() { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); } @@ -4468,7 +5370,7 @@ public final class MasterAdminProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasRegion()) { memoizedIsInitialized = 0; return false; @@ -4480,7 +5382,7 @@ public final class MasterAdminProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -4489,12 +5391,12 @@ public final class MasterAdminProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -4504,14 +5406,14 @@ public final class MasterAdminProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -4521,7 +5423,7 @@ public final class MasterAdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest other = 
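
The unknownFields set retained by the new constructors (visible just above) is what keeps older code forward compatible: tags it does not recognize survive a parse/serialize round trip instead of being dropped. A sketch with an artificial unknown field number, chosen to be one FileDescriptorProto does not declare:

    import com.google.protobuf.DescriptorProtos.FileDescriptorProto;
    import com.google.protobuf.InvalidProtocolBufferException;
    import com.google.protobuf.UnknownFieldSet;

    public class UnknownFieldRoundTrip {
      public static void main(String[] args) throws InvalidProtocolBufferException {
        UnknownFieldSet unknown = UnknownFieldSet.newBuilder()
            .addField(54321, UnknownFieldSet.Field.newBuilder().addVarint(7).build())
            .build();
        byte[] wire = FileDescriptorProto.newBuilder()
            .setName("demo.proto").setUnknownFields(unknown).build().toByteArray();

        FileDescriptorProto parsed = FileDescriptorProto.PARSER.parseFrom(wire);
        // The unrecognized field is still there after the round trip.
        System.out.println(parsed.getUnknownFields().hasField(54321)); // true
      }
    }
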
(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest) obj; - + boolean result = true; result = result && (hasRegion() == other.hasRegion()); if (hasRegion()) { @@ -4532,9 +5434,13 @@ public final class MasterAdminProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasRegion()) { @@ -4542,89 +5448,79 @@ public final class MasterAdminProtos { hash = (53 * hash) + getRegion().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code AssignRegionRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequestOrBuilder { @@ -4632,18 +5528,21 @@ public final class MasterAdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AssignRegionRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AssignRegionRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AssignRegionRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -4655,7 +5554,7 @@ public final class MasterAdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (regionBuilder_ == null) { @@ -4666,20 +5565,20 @@ public final class MasterAdminProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AssignRegionRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest.getDefaultInstance(); } - + public 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest result = buildPartial(); if (!result.isInitialized()) { @@ -4687,17 +5586,7 @@ public final class MasterAdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest(this); int from_bitField0_ = bitField0_; @@ -4714,7 +5603,7 @@ public final class MasterAdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest)other); @@ -4723,7 +5612,7 @@ public final class MasterAdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest.getDefaultInstance()) return this; if (other.hasRegion()) { @@ -4732,7 +5621,7 @@ public final class MasterAdminProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasRegion()) { @@ -4744,52 +5633,39 @@ public final class MasterAdminProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegion(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + 
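
The Builder above keeps build() strict while the removed buildParsed() is subsumed by PARSER: AssignRegionRequest declares a required RegionSpecifier, so an incomplete builder has to be caught somewhere. A sketch of the distinction against a hypothetical generated class Req from "required string id = 1;" (no such message exists in this patch):

    // Hypothetical: assume Req was generated from "required string id = 1;".
    public class RequiredFieldCheck {
      static Req buildLoose(Req.Builder b) {
        if (!b.isInitialized()) {
          // build() would throw UninitializedMessageException here;
          // buildPartial() permits the missing required field, which is
          // also what PARSER relies on before its own initialization check.
          return b.buildPartial();
        }
        return b.build();
      }
    }
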
// required .RegionSpecifier region = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + /** + * required .RegionSpecifier region = 1; + */ public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { return region_; @@ -4797,6 +5673,9 @@ public final class MasterAdminProtos { return regionBuilder_.getMessage(); } } + /** + * required .RegionSpecifier region = 1; + */ public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (value == null) { @@ -4810,6 +5689,9 @@ public final class MasterAdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier region = 1; + */ public Builder setRegion( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { if (regionBuilder_ == null) { @@ -4821,6 +5703,9 @@ public final class MasterAdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier region = 1; + */ public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -4837,6 +5722,9 @@ public final class MasterAdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier region = 1; + */ public Builder clearRegion() { if (regionBuilder_ == null) { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); @@ -4847,11 +5735,17 @@ public final class MasterAdminProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { bitField0_ |= 0x00000001; onChanged(); return getRegionFieldBuilder().getBuilder(); } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); @@ -4859,6 +5753,9 @@ public final class MasterAdminProtos { return region_; } } + /** + * required .RegionSpecifier region = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { @@ -4872,84 +5769,145 @@ public final class MasterAdminProtos { } return regionBuilder_; } - + // @@protoc_insertion_point(builder_scope:AssignRegionRequest) } - + static { defaultInstance = new AssignRegionRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:AssignRegionRequest) } - + public interface AssignRegionResponseOrBuilder extends 
com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code AssignRegionResponse} + */ public static final class AssignRegionResponse extends com.google.protobuf.GeneratedMessage implements AssignRegionResponseOrBuilder { // Use AssignRegionResponse.newBuilder() to construct. - private AssignRegionResponse(Builder builder) { + private AssignRegionResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private AssignRegionResponse(boolean noInit) {} - + private AssignRegionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final AssignRegionResponse defaultInstance; public static AssignRegionResponse getDefaultInstance() { return defaultInstance; } - + public AssignRegionResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private AssignRegionResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AssignRegionResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AssignRegionResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AssignRegionResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public AssignRegionResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new AssignRegionResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + 
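
Even an empty message like AssignRegionResponse gets the full constructor/PARSER treatment above, because an "empty" payload from a newer peer may still carry unknown fields that must be preserved. Parsing zero bytes is a legal encoding in which every field is simply absent:

    import com.google.protobuf.DescriptorProtos.FileDescriptorProto;
    import com.google.protobuf.InvalidProtocolBufferException;

    public class EmptyPayload {
      public static void main(String[] args) throws InvalidProtocolBufferException {
        // A zero-length buffer decodes to the default instance.
        FileDescriptorProto msg = FileDescriptorProto.PARSER.parseFrom(new byte[0]);
        System.out.println(msg.equals(FileDescriptorProto.getDefaultInstance())); // true
      }
    }
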
memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -4959,101 +5917,95 @@ public final class MasterAdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse) obj; - + boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = 
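
writeTo() above calls getSerializedSize() before emitting anything: the size pass populates the memoized size caches that length-prefixed nested messages need during the write pass. toByteArray() performs the same two steps, which can be reproduced by hand:

    import com.google.protobuf.CodedOutputStream;
    import com.google.protobuf.DescriptorProtos.FileDescriptorProto;
    import java.io.IOException;

    public class SizeThenWrite {
      public static void main(String[] args) throws IOException {
        FileDescriptorProto msg =
            FileDescriptorProto.newBuilder().setName("demo.proto").build();

        byte[] buf = new byte[msg.getSerializedSize()]; // pass 1: measure
        CodedOutputStream out = CodedOutputStream.newInstance(buf);
        msg.writeTo(out);                               // pass 2: emit
        out.checkNoSpaceLeft();                         // buffer exactly filled
      }
    }
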
newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code AssignRegionResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponseOrBuilder { @@ -5061,18 +6013,21 @@ public final class MasterAdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AssignRegionResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AssignRegionResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AssignRegionResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -5083,25 +6038,25 @@ public final class MasterAdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AssignRegionResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse result = buildPartial(); if (!result.isInitialized()) { @@ -5109,23 +6064,13 @@ public final class MasterAdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse(this); onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse)other); @@ -5134,120 +6079,220 @@ public final class MasterAdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - - + // @@protoc_insertion_point(builder_scope:AssignRegionResponse) } - + static { defaultInstance = new AssignRegionResponse(true); defaultInstance.initFields(); } - + 
// @@protoc_insertion_point(class_scope:AssignRegionResponse) } - + public interface UnassignRegionRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .RegionSpecifier region = 1; + /** + * required .RegionSpecifier region = 1; + */ boolean hasRegion(); + /** + * required .RegionSpecifier region = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + /** + * required .RegionSpecifier region = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - + // optional bool force = 2 [default = false]; + /** + * optional bool force = 2 [default = false]; + */ boolean hasForce(); + /** + * optional bool force = 2 [default = false]; + */ boolean getForce(); } + /** + * Protobuf type {@code UnassignRegionRequest} + */ public static final class UnassignRegionRequest extends com.google.protobuf.GeneratedMessage implements UnassignRegionRequestOrBuilder { // Use UnassignRegionRequest.newBuilder() to construct. - private UnassignRegionRequest(Builder builder) { + private UnassignRegionRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private UnassignRegionRequest(boolean noInit) {} - + private UnassignRegionRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final UnassignRegionRequest defaultInstance; public static UnassignRegionRequest getDefaultInstance() { return defaultInstance; } - + public UnassignRegionRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private UnassignRegionRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = region_.toBuilder(); + } + region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(region_); + region_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + case 16: { + bitField0_ |= 0x00000002; + force_ = input.readBool(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return 
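
The "case 10" branch in the UnassignRegionRequest constructor above merges a repeated occurrence of the embedded region field into the value already read, mirroring protobuf's general rule that concatenated encodings merge, with singular fields taking the later value. The same rule is observable at the top level:

    import com.google.protobuf.DescriptorProtos.FileDescriptorProto;
    import com.google.protobuf.InvalidProtocolBufferException;

    public class ConcatMerge {
      public static void main(String[] args) throws InvalidProtocolBufferException {
        byte[] a = FileDescriptorProto.newBuilder().setName("a.proto").build().toByteArray();
        byte[] b = FileDescriptorProto.newBuilder().setPackage("demo").build().toByteArray();

        byte[] both = new byte[a.length + b.length];
        System.arraycopy(a, 0, both, 0, a.length);
        System.arraycopy(b, 0, both, a.length, b.length);

        // Parsing the concatenation merges the two encodings: fields absent
        // in the later chunk are kept from the earlier one.
        FileDescriptorProto merged = FileDescriptorProto.PARSER.parseFrom(both);
        System.out.println(merged.getName() + " / " + merged.getPackage()); // a.proto / demo
      }
    }
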
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_UnassignRegionRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_UnassignRegionRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_UnassignRegionRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public UnassignRegionRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new UnassignRegionRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required .RegionSpecifier region = 1; public static final int REGION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + /** + * required .RegionSpecifier region = 1; + */ public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { return region_; } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { return region_; } - + // optional bool force = 2 [default = false]; public static final int FORCE_FIELD_NUMBER = 2; private boolean force_; + /** + * optional bool force = 2 [default = false]; + */ public boolean hasForce() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional bool force = 2 [default = false]; + */ public boolean getForce() { return force_; } - + private void initFields() { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); force_ = false; @@ -5256,7 +6301,7 @@ public final class MasterAdminProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasRegion()) { memoizedIsInitialized = 0; return false; @@ -5268,7 +6313,7 @@ public final class MasterAdminProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -5280,12 +6325,12 @@ public final class MasterAdminProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -5299,14 +6344,14 @@ public final class MasterAdminProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public 
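
UnassignRegionRequest's "optional bool force = 2 [default = false]" shows proto2 has/get semantics: the getter returns the declared default even when the field was never set on the wire, so only hasForce() distinguishes "absent" from "explicitly false". The same behavior on a stock protobuf-java type with a defaulted optional bool:

    import com.google.protobuf.DescriptorProtos.FieldOptions;

    public class OptionalDefaults {
      public static void main(String[] args) {
        // FieldOptions.deprecated is an optional bool with default false.
        FieldOptions opts = FieldOptions.getDefaultInstance();
        System.out.println(opts.hasDeprecated()); // false: never set
        System.out.println(opts.getDeprecated()); // false: the declared default
      }
    }
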
boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -5316,7 +6361,7 @@ public final class MasterAdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest) obj; - + boolean result = true; result = result && (hasRegion() == other.hasRegion()); if (hasRegion()) { @@ -5332,9 +6377,13 @@ public final class MasterAdminProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasRegion()) { @@ -5346,89 +6395,79 @@ public final class MasterAdminProtos { hash = (53 * hash) + hashBoolean(getForce()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder 
builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code UnassignRegionRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequestOrBuilder { @@ -5436,18 +6475,21 @@ public final class MasterAdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_UnassignRegionRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_UnassignRegionRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_UnassignRegionRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -5459,7 +6501,7 @@ public final class MasterAdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (regionBuilder_ == null) { @@ -5472,20 +6514,20 @@ public final class MasterAdminProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_UnassignRegionRequest_descriptor; } - + public 
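
The newBuilder(prototype)/toBuilder() plumbing kept above is how immutable messages get "edited": seed a builder from an existing instance, change fields, and build a new copy, leaving the original untouched. For example:

    import com.google.protobuf.DescriptorProtos.FileDescriptorProto;

    public class EditByCopy {
      public static void main(String[] args) {
        FileDescriptorProto base =
            FileDescriptorProto.newBuilder().setName("a.proto").build();

        // toBuilder() copies base's fields; base itself is never mutated.
        FileDescriptorProto edited = base.toBuilder().setPackage("demo").build();

        System.out.println(base.hasPackage());   // false
        System.out.println(edited.getPackage()); // demo
      }
    }
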
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest result = buildPartial(); if (!result.isInitialized()) { @@ -5493,17 +6535,7 @@ public final class MasterAdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest(this); int from_bitField0_ = bitField0_; @@ -5524,7 +6556,7 @@ public final class MasterAdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest)other); @@ -5533,7 +6565,7 @@ public final class MasterAdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest.getDefaultInstance()) return this; if (other.hasRegion()) { @@ -5545,7 +6577,7 @@ public final class MasterAdminProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasRegion()) { @@ -5557,57 +6589,39 @@ public final class MasterAdminProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegion(subBuilder.buildPartial()); - break; - } - case 16: { - bitField0_ |= 0x00000002; - force_ = input.readBool(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, 
extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .RegionSpecifier region = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + /** + * required .RegionSpecifier region = 1; + */ public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { return region_; @@ -5615,6 +6629,9 @@ public final class MasterAdminProtos { return regionBuilder_.getMessage(); } } + /** + * required .RegionSpecifier region = 1; + */ public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (value == null) { @@ -5628,6 +6645,9 @@ public final class MasterAdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier region = 1; + */ public Builder setRegion( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { if (regionBuilder_ == null) { @@ -5639,6 +6659,9 @@ public final class MasterAdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier region = 1; + */ public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -5655,6 +6678,9 @@ public final class MasterAdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .RegionSpecifier region = 1; + */ public Builder clearRegion() { if (regionBuilder_ == null) { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); @@ -5665,11 +6691,17 @@ public final class MasterAdminProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { bitField0_ |= 0x00000001; onChanged(); return getRegionFieldBuilder().getBuilder(); } + /** + * required .RegionSpecifier region = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); @@ -5677,6 +6709,9 @@ public final class MasterAdminProtos { return region_; } } + /** + * required .RegionSpecifier region = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { @@ -5690,105 +6725,178 @@ public 
final class MasterAdminProtos { } return regionBuilder_; } - + // optional bool force = 2 [default = false]; private boolean force_ ; + /** + * optional bool force = 2 [default = false]; + */ public boolean hasForce() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional bool force = 2 [default = false]; + */ public boolean getForce() { return force_; } + /** + * optional bool force = 2 [default = false]; + */ public Builder setForce(boolean value) { bitField0_ |= 0x00000002; force_ = value; onChanged(); return this; } + /** + * optional bool force = 2 [default = false]; + */ public Builder clearForce() { bitField0_ = (bitField0_ & ~0x00000002); force_ = false; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:UnassignRegionRequest) } - + static { defaultInstance = new UnassignRegionRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:UnassignRegionRequest) } - + public interface UnassignRegionResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code UnassignRegionResponse} + */ public static final class UnassignRegionResponse extends com.google.protobuf.GeneratedMessage implements UnassignRegionResponseOrBuilder { // Use UnassignRegionResponse.newBuilder() to construct. - private UnassignRegionResponse(Builder builder) { + private UnassignRegionResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private UnassignRegionResponse(boolean noInit) {} - + private UnassignRegionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final UnassignRegionResponse defaultInstance; public static UnassignRegionResponse getDefaultInstance() { return defaultInstance; } - + public UnassignRegionResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private UnassignRegionResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_UnassignRegionResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_UnassignRegionResponse_fieldAccessorTable; + return 
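The rewritten Builder.mergeFrom above no longer walks the tag stream by hand; it delegates to PARSER.parsePartialFrom and, via the finally block, re-merges whatever was read before a failure. A minimal sketch of that contract, using types from this patch (the wrapper class and method name are illustrative, not part of the patch):

import com.google.protobuf.CodedInputStream;
import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest;

public class MergeFromSketch {
  // On a parse error, fields read before the failure have already been
  // merged into the builder by the generated finally block; the partial
  // message also rides along on the exception itself.
  static UnassignRegionRequest.Builder merge(byte[] wire) {
    UnassignRegionRequest.Builder builder = UnassignRegionRequest.newBuilder();
    try {
      builder.mergeFrom(CodedInputStream.newInstance(wire));
    } catch (java.io.IOException e) {
      if (e instanceof InvalidProtocolBufferException) {
        // Same object the generated code merged before rethrowing.
        Object partial = ((InvalidProtocolBufferException) e).getUnfinishedMessage();
      }
    }
    return builder;  // retains any partially parsed fields
  }
}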
-
+
  public interface UnassignRegionResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
+  /**
+   * Protobuf type {@code UnassignRegionResponse}
+   */
  public static final class UnassignRegionResponse extends
      com.google.protobuf.GeneratedMessage
      implements UnassignRegionResponseOrBuilder {
    // Use UnassignRegionResponse.newBuilder() to construct.
-    private UnassignRegionResponse(Builder builder) {
+    private UnassignRegionResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
+      this.unknownFields = builder.getUnknownFields();
    }
-    private UnassignRegionResponse(boolean noInit) {}
-
+    private UnassignRegionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
    private static final UnassignRegionResponse defaultInstance;
    public static UnassignRegionResponse getDefaultInstance() {
      return defaultInstance;
    }
-
+
    public UnassignRegionResponse getDefaultInstanceForType() {
      return defaultInstance;
    }
-
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private UnassignRegionResponse(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_UnassignRegionResponse_descriptor;
    }
-
+
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_UnassignRegionResponse_fieldAccessorTable;
+      return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_UnassignRegionResponse_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<UnassignRegionResponse> PARSER =
+        new com.google.protobuf.AbstractParser<UnassignRegionResponse>() {
+      public UnassignRegionResponse parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new UnassignRegionResponse(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<UnassignRegionResponse> getParserForType() {
+      return PARSER;
    }
-
+
    private void initFields() {
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
-
+
      memoizedIsInitialized = 1;
      return true;
    }
-
+
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }
-
+
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
-
+
      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
-
+
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
-
+
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
@@ -5798,101 +6906,95 @@ public final class MasterAdminProtos {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse) obj;
-
+
      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
-
+
+    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
      return hash;
    }
-
+
    public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-          .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-          .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-          .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-          .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
    }
-
+
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
-
+
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
+    /**
+     * Protobuf type {@code UnassignRegionResponse}
+     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponseOrBuilder {
@@ -5900,18 +7002,21 @@ public final class MasterAdminProtos {
        getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_UnassignRegionResponse_descriptor;
      }
-
+
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_UnassignRegionResponse_fieldAccessorTable;
+        return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_UnassignRegionResponse_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse.Builder.class);
      }
-
+
      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
-
-      private Builder(BuilderParent parent) {
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
@@ -5922,25 +7027,25 @@ public final class MasterAdminProtos {
      private static Builder create() {
        return new Builder();
      }
-
+
      public Builder clear() {
        super.clear();
        return this;
      }
-
+
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
-
+
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse.getDescriptor();
+        return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_UnassignRegionResponse_descriptor;
      }
-
+
      public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse.getDefaultInstance();
      }
-
+
      public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse result = buildPartial();
        if (!result.isInitialized()) {
@@ -5948,23 +7053,13 @@ public final class MasterAdminProtos {
        }
        return result;
      }
-
-      private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse buildParsed()
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(
-            result).asInvalidProtocolBufferException();
-        }
-        return result;
-      }
-
+
      public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse(this);
        onBuilt();
        return result;
      }
-
+
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse)other);
@@ -5973,106 +7068,189 @@ public final class MasterAdminProtos {
          return this;
        }
      }
-
+
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
-
+
      public final boolean isInitialized() {
        return true;
      }
-
+
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
-        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder(
-            this.getUnknownFields());
-        while (true) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              this.setUnknownFields(unknownFields.build());
-              onChanged();
-              return this;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                this.setUnknownFields(unknownFields.build());
-                onChanged();
-                return this;
-              }
-              break;
-            }
+        org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
          }
        }
+        return this;
      }
-
-
+
      // @@protoc_insertion_point(builder_scope:UnassignRegionResponse)
    }
-
+
    static {
      defaultInstance = new UnassignRegionResponse(true);
      defaultInstance.initFields();
    }
-
+
    // @@protoc_insertion_point(class_scope:UnassignRegionResponse)
  }
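One behavioral detail worth noting in the parseDelimitedFrom rewrite above: the old code returned null when mergeDelimitedFrom reported end-of-stream, and Parser.parseDelimitedFrom keeps that null-at-EOF contract, so existing callers that loop until null still terminate. A short usage sketch (the wrapper class and method name are illustrative):

import java.io.IOException;
import java.io.InputStream;
import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse;

public class DelimitedReadSketch {
  // Reads a stream of length-delimited messages; parseDelimitedFrom
  // returns null on clean end-of-stream, mirroring the old
  // mergeDelimitedFrom(...) == false path.
  static int countMessages(InputStream in) throws IOException {
    int n = 0;
    UnassignRegionResponse msg;
    while ((msg = UnassignRegionResponse.parseDelimitedFrom(in)) != null) {
      n++;
    }
    return n;
  }
}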
-
+
  public interface OfflineRegionRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
-
+
    // required .RegionSpecifier region = 1;
+    /**
+     * <code>required .RegionSpecifier region = 1;</code>
+     */
    boolean hasRegion();
+    /**
+     * <code>required .RegionSpecifier region = 1;</code>
+     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
+    /**
+     * <code>required .RegionSpecifier region = 1;</code>
+     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();
  }
+  /**
+   * Protobuf type {@code OfflineRegionRequest}
+   */
  public static final class OfflineRegionRequest extends
      com.google.protobuf.GeneratedMessage
      implements OfflineRegionRequestOrBuilder {
    // Use OfflineRegionRequest.newBuilder() to construct.
-    private OfflineRegionRequest(Builder builder) {
+    private OfflineRegionRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
+      this.unknownFields = builder.getUnknownFields();
    }
-    private OfflineRegionRequest(boolean noInit) {}
-
+    private OfflineRegionRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
    private static final OfflineRegionRequest defaultInstance;
    public static OfflineRegionRequest getDefaultInstance() {
      return defaultInstance;
    }
-
+
    public OfflineRegionRequest getDefaultInstanceForType() {
      return defaultInstance;
    }
-
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private OfflineRegionRequest(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 10: {
+              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
+              if (((bitField0_ & 0x00000001) == 0x00000001)) {
+                subBuilder = region_.toBuilder();
+              }
+              region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
+              if (subBuilder != null) {
+                subBuilder.mergeFrom(region_);
+                region_ = subBuilder.buildPartial();
+              }
+              bitField0_ |= 0x00000001;
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_OfflineRegionRequest_descriptor;
    }
-
+
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_OfflineRegionRequest_fieldAccessorTable;
+      return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_OfflineRegionRequest_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<OfflineRegionRequest> PARSER =
+        new com.google.protobuf.AbstractParser<OfflineRegionRequest>() {
+      public OfflineRegionRequest parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new OfflineRegionRequest(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<OfflineRegionRequest> getParserForType() {
+      return PARSER;
    }
-
+
    private int bitField0_;
    // required .RegionSpecifier region = 1;
    public static final int REGION_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
+    /**
+     * <code>required .RegionSpecifier region = 1;</code>
+     */
    public boolean hasRegion() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
+    /**
+     * <code>required .RegionSpecifier region = 1;</code>
+     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
      return region_;
    }
+    /**
+     * <code>required .RegionSpecifier region = 1;</code>
+     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
      return region_;
    }
-
+
    private void initFields() {
      region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
    }
@@ -6080,7 +7258,7 @@ public final class MasterAdminProtos {
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
-
+
      if (!hasRegion()) {
        memoizedIsInitialized = 0;
        return false;
@@ -6092,7 +7270,7 @@ public final class MasterAdminProtos {
      memoizedIsInitialized = 1;
      return true;
    }
-
+
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
@@ -6101,12 +7279,12 @@ public final class MasterAdminProtos {
      }
      getUnknownFields().writeTo(output);
    }
-
+
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
-
+
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
@@ -6116,14 +7294,14 @@ public final class MasterAdminProtos {
      memoizedSerializedSize = size;
      return size;
    }
-
+
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
-
+
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
@@ -6133,7 +7311,7 @@ public final class MasterAdminProtos {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest) obj;
-
+
      boolean result = true;
      result = result && (hasRegion() == other.hasRegion());
      if (hasRegion()) {
@@ -6144,9 +7322,13 @@ public final class MasterAdminProtos {
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
-
+
+    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasRegion()) {
@@ -6154,89 +7336,79 @@ public final class MasterAdminProtos {
        hash = (53 * hash) + getRegion().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
      return hash;
    }
-
+
    public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-          .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-          .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-          .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-          .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
    }
-
+
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
-
+
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
+    /**
+     * Protobuf type {@code OfflineRegionRequest}
+     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequestOrBuilder {
@@ -6244,18 +7416,21 @@ public final class MasterAdminProtos {
        getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_OfflineRegionRequest_descriptor;
      }
-
+
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_OfflineRegionRequest_fieldAccessorTable;
+        return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_OfflineRegionRequest_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest.Builder.class);
      }
-
+
      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
-
-      private Builder(BuilderParent parent) {
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
@@ -6267,7 +7442,7 @@ public final class MasterAdminProtos {
      private static Builder create() {
        return new Builder();
      }
-
+
      public Builder clear() {
        super.clear();
        if (regionBuilder_ == null) {
@@ -6278,20 +7453,20 @@ public final class MasterAdminProtos {
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
-
+
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
-
+
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest.getDescriptor();
+        return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_OfflineRegionRequest_descriptor;
      }
-
+
      public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest.getDefaultInstance();
      }
-
+
      public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest result = buildPartial();
        if (!result.isInitialized()) {
@@ -6299,17 +7474,7 @@ public final class MasterAdminProtos {
        }
        return result;
      }
-
-      private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest buildParsed()
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(
-            result).asInvalidProtocolBufferException();
-        }
-        return result;
-      }
-
+
      public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest(this);
        int from_bitField0_ = bitField0_;
@@ -6326,7 +7491,7 @@ public final class MasterAdminProtos {
        onBuilt();
        return result;
      }
-
+
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest)other);
@@ -6335,7 +7500,7 @@ public final class MasterAdminProtos {
          return this;
        }
      }
-
+
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest.getDefaultInstance()) return this;
        if (other.hasRegion()) {
@@ -6344,7 +7509,7 @@ public final class MasterAdminProtos {
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
-
+
      public final boolean isInitialized() {
        if (!hasRegion()) {
@@ -6356,52 +7521,39 @@ public final class MasterAdminProtos {
        }
        return true;
      }
-
+
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
-        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder(
-            this.getUnknownFields());
-        while (true) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              this.setUnknownFields(unknownFields.build());
-              onChanged();
-              return this;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                this.setUnknownFields(unknownFields.build());
-                onChanged();
-                return this;
-              }
-              break;
-            }
-            case 10: {
-              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder();
-              if (hasRegion()) {
-                subBuilder.mergeFrom(getRegion());
-              }
-              input.readMessage(subBuilder, extensionRegistry);
-              setRegion(subBuilder.buildPartial());
-              break;
-            }
+        org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
          }
        }
+        return this;
      }
      private int bitField0_;
-
+
      // required .RegionSpecifier region = 1;
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
+      /**
+       * <code>required .RegionSpecifier region = 1;</code>
+       */
      public boolean hasRegion() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
+      /**
+       * <code>required .RegionSpecifier region = 1;</code>
+       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
        if (regionBuilder_ == null) {
          return region_;
@@ -6409,6 +7561,9 @@ public final class MasterAdminProtos {
          return regionBuilder_.getMessage();
        }
      }
+      /**
+       * <code>required .RegionSpecifier region = 1;</code>
+       */
      public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
        if (regionBuilder_ == null) {
          if (value == null) {
@@ -6422,6 +7577,9 @@ public final class MasterAdminProtos {
        bitField0_ |= 0x00000001;
        return this;
      }
+      /**
+       * <code>required .RegionSpecifier region = 1;</code>
+       */
      public Builder setRegion(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
        if (regionBuilder_ == null) {
@@ -6433,6 +7591,9 @@ public final class MasterAdminProtos {
        bitField0_ |= 0x00000001;
        return this;
      }
+      /**
+       * <code>required .RegionSpecifier region = 1;</code>
+       */
      public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
        if (regionBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
@@ -6449,6 +7610,9 @@ public final class MasterAdminProtos {
        bitField0_ |= 0x00000001;
        return this;
      }
+      /**
+       * <code>required .RegionSpecifier region = 1;</code>
+       */
      public Builder clearRegion() {
        if (regionBuilder_ == null) {
          region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
@@ -6459,11 +7623,17 @@ public final class MasterAdminProtos {
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
+      /**
+       * <code>required .RegionSpecifier region = 1;</code>
+       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getRegionFieldBuilder().getBuilder();
      }
+      /**
+       * <code>required .RegionSpecifier region = 1;</code>
+       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
        if (regionBuilder_ != null) {
          return regionBuilder_.getMessageOrBuilder();
@@ -6471,6 +7641,9 @@ public final class MasterAdminProtos {
          return region_;
        }
      }
+      /**
+       * <code>required .RegionSpecifier region = 1;</code>
+       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>
          getRegionFieldBuilder() {
@@ -6484,84 +7657,145 @@ public final class MasterAdminProtos {
        }
        return regionBuilder_;
      }
-
+
      // @@protoc_insertion_point(builder_scope:OfflineRegionRequest)
    }
-
+
    static {
      defaultInstance = new OfflineRegionRequest(true);
      defaultInstance.initFields();
    }
-
+
    // @@protoc_insertion_point(class_scope:OfflineRegionRequest)
  }
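OfflineRegionRequest above shows the other half of the migration: parsing now happens in a private constructor that reads the CodedInputStream directly, merging message fields through a temporary sub-builder and freezing unknown fields in the finally block. A hedged round-trip sketch; the RegionSpecifier setter and enum names are assumed from the HBaseProtos definitions in this branch:

import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier;
import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest;

public class ParseAtConstructionSketch {
  // Builds a request, serializes it, then re-parses it through the new
  // static PARSER; the message is materialized while the bytes are read,
  // instead of via a Builder as in the old buildParsed() path.
  static OfflineRegionRequest roundTrip(byte[] regionName) throws Exception {
    OfflineRegionRequest req = OfflineRegionRequest.newBuilder()
        .setRegion(RegionSpecifier.newBuilder()
            .setType(RegionSpecifier.RegionSpecifierType.REGION_NAME)  // assumed enum
            .setValue(ByteString.copyFrom(regionName)))
        .build();
    return OfflineRegionRequest.PARSER.parseFrom(req.toByteArray());
  }
}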
-
+
  public interface OfflineRegionResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
+  /**
+   * Protobuf type {@code OfflineRegionResponse}
+   */
  public static final class OfflineRegionResponse extends
      com.google.protobuf.GeneratedMessage
      implements OfflineRegionResponseOrBuilder {
    // Use OfflineRegionResponse.newBuilder() to construct.
-    private OfflineRegionResponse(Builder builder) {
+    private OfflineRegionResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
+      this.unknownFields = builder.getUnknownFields();
    }
-    private OfflineRegionResponse(boolean noInit) {}
-
+    private OfflineRegionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
    private static final OfflineRegionResponse defaultInstance;
    public static OfflineRegionResponse getDefaultInstance() {
      return defaultInstance;
    }
-
+
    public OfflineRegionResponse getDefaultInstanceForType() {
      return defaultInstance;
    }
-
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private OfflineRegionResponse(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_OfflineRegionResponse_descriptor;
    }
-
+
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_OfflineRegionResponse_fieldAccessorTable;
+      return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_OfflineRegionResponse_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<OfflineRegionResponse> PARSER =
+        new com.google.protobuf.AbstractParser<OfflineRegionResponse>() {
+      public OfflineRegionResponse parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new OfflineRegionResponse(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<OfflineRegionResponse> getParserForType() {
+      return PARSER;
    }
-
+
    private void initFields() {
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
-
+
      memoizedIsInitialized = 1;
      return true;
    }
-
+
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }
-
+
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
-
+
      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
-
+
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
-
+
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
@@ -6571,101 +7805,95 @@ public final class MasterAdminProtos {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse) obj;
-
+
      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
-
+
+    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
      return hash;
    }
-
+
    public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-          .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-          .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-          .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-          .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
    }
-
+
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
-
+
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
+    /**
+     * Protobuf type {@code OfflineRegionResponse}
+     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponseOrBuilder {
@@ -6673,18 +7901,21 @@ public final class MasterAdminProtos {
        getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_OfflineRegionResponse_descriptor;
      }
-
+
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_OfflineRegionResponse_fieldAccessorTable;
+        return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_OfflineRegionResponse_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse.Builder.class);
      }
-
+
      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
-
-      private Builder(BuilderParent parent) {
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
@@ -6695,25 +7926,25 @@ public final class MasterAdminProtos {
      private static Builder create() {
        return new Builder();
      }
-
+
      public Builder clear() {
        super.clear();
        return this;
      }
-
+
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
-
+
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse.getDescriptor();
+        return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_OfflineRegionResponse_descriptor;
      }
-
+
      public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse.getDefaultInstance();
      }
-
+
      public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse result = buildPartial();
        if (!result.isInitialized()) {
@@ -6721,23 +7952,13 @@ public final class MasterAdminProtos {
        }
        return result;
      }
-
-      private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse buildParsed()
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(
-            result).asInvalidProtocolBufferException();
-        }
-        return result;
-      }
-
+
      public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse(this);
        onBuilt();
        return result;
      }
-
+
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse)other);
@@ -6746,134 +7967,246 @@ public final class MasterAdminProtos {
          return this;
        }
      }
-
+
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
-
+
      public final boolean isInitialized() {
        return true;
      }
-
+
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
-        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder(
-            this.getUnknownFields());
-        while (true) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              this.setUnknownFields(unknownFields.build());
-              onChanged();
-              return this;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                this.setUnknownFields(unknownFields.build());
-                onChanged();
-                return this;
-              }
-              break;
-            }
+        org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
          }
        }
+        return this;
      }
-
-
+
      // @@protoc_insertion_point(builder_scope:OfflineRegionResponse)
    }
-
+
    static {
      defaultInstance = new OfflineRegionResponse(true);
      defaultInstance.initFields();
    }
-
+
    // @@protoc_insertion_point(class_scope:OfflineRegionResponse)
  }
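The memoizedHashCode field added throughout this patch is safe only because generated messages are immutable: a data race can at worst recompute the same value, and the 0 sentinel merely means a message whose hash is genuinely 0 gets recomputed on every call. The idiom in isolation (illustrative class, not part of the patch):

public final class MemoizedHash {
  private final String payload;
  private int memoizedHashCode; // 0 means "not computed yet"

  public MemoizedHash(String payload) { this.payload = payload; }

  @Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;   // cached value from an earlier call
    }
    int hash = 41;
    hash = (19 * hash) + payload.hashCode();
    memoizedHashCode = hash;     // idempotent write; a race is benign
    return hash;
  }
}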
-
+
  public interface CreateTableRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
-
+
    // required .TableSchema tableSchema = 1;
+    /**
+     * <code>required .TableSchema tableSchema = 1;</code>
+     */
    boolean hasTableSchema();
+    /**
+     * <code>required .TableSchema tableSchema = 1;</code>
+     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema();
+    /**
+     * <code>required .TableSchema tableSchema = 1;</code>
+     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder();
-
+
    // repeated bytes splitKeys = 2;
+    /**
+     * <code>repeated bytes splitKeys = 2;</code>
+     */
    java.util.List<com.google.protobuf.ByteString> getSplitKeysList();
+    /**
+     * <code>repeated bytes splitKeys = 2;</code>
+     */
    int getSplitKeysCount();
+    /**
+     * <code>repeated bytes splitKeys = 2;</code>
+     */
    com.google.protobuf.ByteString getSplitKeys(int index);
  }
+  /**
+   * Protobuf type {@code CreateTableRequest}
+   */
  public static final class CreateTableRequest extends
      com.google.protobuf.GeneratedMessage
      implements CreateTableRequestOrBuilder {
    // Use CreateTableRequest.newBuilder() to construct.
-    private CreateTableRequest(Builder builder) {
+    private CreateTableRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
+      this.unknownFields = builder.getUnknownFields();
    }
-    private CreateTableRequest(boolean noInit) {}
-
+    private CreateTableRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
    private static final CreateTableRequest defaultInstance;
    public static CreateTableRequest getDefaultInstance() {
      return defaultInstance;
    }
-
+
    public CreateTableRequest getDefaultInstanceForType() {
      return defaultInstance;
    }
-
-    public static final com.google.protobuf.Descriptors.Descriptor
-        getDescriptor() {
-      return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CreateTableRequest_descriptor;
-    }
-
-    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
-        internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CreateTableRequest_fieldAccessorTable;
-    }
-
-    private int bitField0_;
-    // required .TableSchema tableSchema = 1;
-    public static final int TABLESCHEMA_FIELD_NUMBER = 1;
-    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema tableSchema_;
-    public boolean hasTableSchema() {
-      return ((bitField0_ & 0x00000001) == 0x00000001);
-    }
-    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema() {
-      return tableSchema_;
-    }
-    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder() {
-      return tableSchema_;
-    }
-
-    // repeated bytes splitKeys = 2;
-    public static final int SPLITKEYS_FIELD_NUMBER = 2;
-    private java.util.List<com.google.protobuf.ByteString> splitKeys_;
-    public java.util.List<com.google.protobuf.ByteString>
-        getSplitKeysList() {
-      return splitKeys_;
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
    }
-    public int getSplitKeysCount() {
+    private CreateTableRequest(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 10: {
+              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder subBuilder = null;
+              if (((bitField0_ & 0x00000001) == 0x00000001)) {
+                subBuilder = tableSchema_.toBuilder();
+              }
+              tableSchema_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.PARSER, extensionRegistry);
+              if (subBuilder != null) {
+                subBuilder.mergeFrom(tableSchema_);
+                tableSchema_ = subBuilder.buildPartial();
+              }
+              bitField0_ |= 0x00000001;
+              break;
+            }
+            case 18: {
+              if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
+                splitKeys_ = new java.util.ArrayList<com.google.protobuf.ByteString>();
+                mutable_bitField0_ |= 0x00000002;
+              }
+              splitKeys_.add(input.readBytes());
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
+          splitKeys_ = java.util.Collections.unmodifiableList(splitKeys_);
+        }
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CreateTableRequest_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CreateTableRequest_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<CreateTableRequest> PARSER =
+        new com.google.protobuf.AbstractParser<CreateTableRequest>() {
+      public CreateTableRequest parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new CreateTableRequest(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<CreateTableRequest> getParserForType() {
+      return PARSER;
+    }
+
splitKeys_.get(index); } - + private void initFields() { tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); - splitKeys_ = java.util.Collections.emptyList();; + splitKeys_ = java.util.Collections.emptyList(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasTableSchema()) { memoizedIsInitialized = 0; return false; @@ -6885,7 +8218,7 @@ public final class MasterAdminProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -6897,12 +8230,12 @@ public final class MasterAdminProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -6921,14 +8254,14 @@ public final class MasterAdminProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -6938,7 +8271,7 @@ public final class MasterAdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest) obj; - + boolean result = true; result = result && (hasTableSchema() == other.hasTableSchema()); if (hasTableSchema()) { @@ -6951,9 +8284,13 @@ public final class MasterAdminProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasTableSchema()) { @@ -6965,89 +8302,79 @@ public final class MasterAdminProtos { hash = (53 * hash) + getSplitKeysList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code CreateTableRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequestOrBuilder { @@ -7055,18 +8382,21 @@ public final class MasterAdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CreateTableRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CreateTableRequest_fieldAccessorTable; + return 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CreateTableRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -7078,7 +8408,7 @@ public final class MasterAdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (tableSchemaBuilder_ == null) { @@ -7087,24 +8417,24 @@ public final class MasterAdminProtos { tableSchemaBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); - splitKeys_ = java.util.Collections.emptyList();; + splitKeys_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CreateTableRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest result = buildPartial(); if (!result.isInitialized()) { @@ -7112,17 +8442,7 @@ public final class MasterAdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest(this); int from_bitField0_ = bitField0_; @@ -7144,7 +8464,7 @@ public final class MasterAdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest)other); @@ -7153,7 +8473,7 @@ public final class MasterAdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest.getDefaultInstance()) return this; if 
(other.hasTableSchema()) { @@ -7172,7 +8492,7 @@ public final class MasterAdminProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasTableSchema()) { @@ -7184,57 +8504,39 @@ public final class MasterAdminProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.newBuilder(); - if (hasTableSchema()) { - subBuilder.mergeFrom(getTableSchema()); - } - input.readMessage(subBuilder, extensionRegistry); - setTableSchema(subBuilder.buildPartial()); - break; - } - case 18: { - ensureSplitKeysIsMutable(); - splitKeys_.add(input.readBytes()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .TableSchema tableSchema = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> tableSchemaBuilder_; + /** + * required .TableSchema tableSchema = 1; + */ public boolean hasTableSchema() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .TableSchema tableSchema = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema() { if (tableSchemaBuilder_ == null) { return tableSchema_; @@ -7242,6 +8544,9 @@ public final class MasterAdminProtos { return tableSchemaBuilder_.getMessage(); } } + /** + * required .TableSchema tableSchema = 1; + */ public Builder setTableSchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) { if (tableSchemaBuilder_ == null) { if (value == null) { @@ -7255,6 +8560,9 @@ public final class MasterAdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .TableSchema tableSchema = 1; + */ public Builder setTableSchema( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder builderForValue) { if (tableSchemaBuilder_ == null) { @@ -7266,6 +8574,9 @@ public final class MasterAdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .TableSchema tableSchema = 1; + 
*/ public Builder mergeTableSchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) { if (tableSchemaBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -7282,6 +8593,9 @@ public final class MasterAdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .TableSchema tableSchema = 1; + */ public Builder clearTableSchema() { if (tableSchemaBuilder_ == null) { tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); @@ -7292,11 +8606,17 @@ public final class MasterAdminProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * required .TableSchema tableSchema = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder getTableSchemaBuilder() { bitField0_ |= 0x00000001; onChanged(); return getTableSchemaFieldBuilder().getBuilder(); } + /** + * required .TableSchema tableSchema = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder() { if (tableSchemaBuilder_ != null) { return tableSchemaBuilder_.getMessageOrBuilder(); @@ -7304,6 +8624,9 @@ public final class MasterAdminProtos { return tableSchema_; } } + /** + * required .TableSchema tableSchema = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> getTableSchemaFieldBuilder() { @@ -7317,25 +8640,37 @@ public final class MasterAdminProtos { } return tableSchemaBuilder_; } - + // repeated bytes splitKeys = 2; - private java.util.List splitKeys_ = java.util.Collections.emptyList();; + private java.util.List splitKeys_ = java.util.Collections.emptyList(); private void ensureSplitKeysIsMutable() { if (!((bitField0_ & 0x00000002) == 0x00000002)) { splitKeys_ = new java.util.ArrayList(splitKeys_); bitField0_ |= 0x00000002; } } + /** + * repeated bytes splitKeys = 2; + */ public java.util.List getSplitKeysList() { return java.util.Collections.unmodifiableList(splitKeys_); } + /** + * repeated bytes splitKeys = 2; + */ public int getSplitKeysCount() { return splitKeys_.size(); } + /** + * repeated bytes splitKeys = 2; + */ public com.google.protobuf.ByteString getSplitKeys(int index) { return splitKeys_.get(index); } + /** + * repeated bytes splitKeys = 2; + */ public Builder setSplitKeys( int index, com.google.protobuf.ByteString value) { if (value == null) { @@ -7346,6 +8681,9 @@ public final class MasterAdminProtos { onChanged(); return this; } + /** + * repeated bytes splitKeys = 2; + */ public Builder addSplitKeys(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -7355,6 +8693,9 @@ public final class MasterAdminProtos { onChanged(); return this; } + /** + * repeated bytes splitKeys = 2; + */ public Builder addAllSplitKeys( java.lang.Iterable values) { ensureSplitKeysIsMutable(); @@ -7362,90 +8703,154 @@ public final class MasterAdminProtos { onChanged(); return this; } + /** + * repeated bytes splitKeys = 2; + */ public Builder clearSplitKeys() { - splitKeys_ = java.util.Collections.emptyList();; + splitKeys_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:CreateTableRequest) } - + static { defaultInstance = new CreateTableRequest(true); 
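(Note: the bulk of this regenerated file follows one mechanical rule. Every static parseFrom/parseDelimitedFrom overload that used to spin up a Builder and call the now-deleted buildParsed() is rerouted through the static PARSER that protobuf 2.5 emits for each message. A minimal, self-contained sketch of the before/after call shape, using com.google.protobuf.DescriptorProtos.FileDescriptorProto — bundled with protobuf-java — as a stand-in, since the HBase-generated types above are not on a stock classpath:

import com.google.protobuf.DescriptorProtos.FileDescriptorProto;
import com.google.protobuf.InvalidProtocolBufferException;

public class ParserShapeSketch {
  // protobuf 2.4-era shape: bytes -> Builder -> built message.
  static FileDescriptorProto oldShape(byte[] data)
      throws InvalidProtocolBufferException {
    return FileDescriptorProto.newBuilder().mergeFrom(data).build();
  }

  // protobuf 2.5 shape: the generated static PARSER parses directly,
  // skipping the intermediate Builder allocation.
  static FileDescriptorProto newShape(byte[] data)
      throws InvalidProtocolBufferException {
    return FileDescriptorProto.PARSER.parseFrom(data);
  }

  public static void main(String[] args) throws Exception {
    byte[] bytes = FileDescriptorProto.newBuilder()
        .setName("demo.proto").build().toByteArray();
    System.out.println(newShape(bytes).getName()); // demo.proto
    System.out.println(oldShape(bytes).getName()); // demo.proto
  }
}

The delimited variants appear to keep their old contract as well: AbstractParser.parseDelimitedFrom returns null on a clean EOF, which is what the deleted "mergeDelimitedFrom(input) ? buildParsed() : null" branching was emulating.)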
defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:CreateTableRequest) } - + public interface CreateTableResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code CreateTableResponse} + */ public static final class CreateTableResponse extends com.google.protobuf.GeneratedMessage implements CreateTableResponseOrBuilder { // Use CreateTableResponse.newBuilder() to construct. - private CreateTableResponse(Builder builder) { + private CreateTableResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private CreateTableResponse(boolean noInit) {} - + private CreateTableResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final CreateTableResponse defaultInstance; public static CreateTableResponse getDefaultInstance() { return defaultInstance; } - + public CreateTableResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private CreateTableResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CreateTableResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CreateTableResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CreateTableResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public CreateTableResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CreateTableResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private void initFields() { } private byte memoizedIsInitialized = -1; public final 
boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -7455,101 +8860,95 @@ public final class MasterAdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse) obj; - + boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code CreateTableResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponseOrBuilder { @@ -7557,18 +8956,21 @@ public final class MasterAdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CreateTableResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CreateTableResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CreateTableResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -7579,25 +8981,25 @@ public final class MasterAdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); return this; } - + public Builder clone() 
{ return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CreateTableResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse result = buildPartial(); if (!result.isInitialized()) { @@ -7605,23 +9007,13 @@ public final class MasterAdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse(this); onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse)other); @@ -7630,102 +9022,171 @@ public final class MasterAdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - - + // 
@@protoc_insertion_point(builder_scope:CreateTableResponse) } - + static { defaultInstance = new CreateTableResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:CreateTableResponse) } - + public interface DeleteTableRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required bytes tableName = 1; + /** + * required bytes tableName = 1; + */ boolean hasTableName(); + /** + * required bytes tableName = 1; + */ com.google.protobuf.ByteString getTableName(); } + /** + * Protobuf type {@code DeleteTableRequest} + */ public static final class DeleteTableRequest extends com.google.protobuf.GeneratedMessage implements DeleteTableRequestOrBuilder { // Use DeleteTableRequest.newBuilder() to construct. - private DeleteTableRequest(Builder builder) { + private DeleteTableRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private DeleteTableRequest(boolean noInit) {} - + private DeleteTableRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final DeleteTableRequest defaultInstance; public static DeleteTableRequest getDefaultInstance() { return defaultInstance; } - + public DeleteTableRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private DeleteTableRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + tableName_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteTableRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteTableRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteTableRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public DeleteTableRequest parsePartialFrom( + 
com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new DeleteTableRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required bytes tableName = 1; public static final int TABLENAME_FIELD_NUMBER = 1; private com.google.protobuf.ByteString tableName_; + /** + * required bytes tableName = 1; + */ public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes tableName = 1; + */ public com.google.protobuf.ByteString getTableName() { return tableName_; } - + private void initFields() { tableName_ = com.google.protobuf.ByteString.EMPTY; } @@ -7733,7 +9194,7 @@ public final class MasterAdminProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasTableName()) { memoizedIsInitialized = 0; return false; @@ -7741,7 +9202,7 @@ public final class MasterAdminProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -7750,12 +9211,12 @@ public final class MasterAdminProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -7765,14 +9226,14 @@ public final class MasterAdminProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -7782,7 +9243,7 @@ public final class MasterAdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest) obj; - + boolean result = true; result = result && (hasTableName() == other.hasTableName()); if (hasTableName()) { @@ -7793,9 +9254,13 @@ public final class MasterAdminProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasTableName()) { @@ -7803,89 +9268,79 @@ public final class MasterAdminProtos { hash = (53 * hash) + getTableName().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code DeleteTableRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequestOrBuilder { @@ -7893,18 +9348,21 @@ public final class MasterAdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteTableRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteTableRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteTableRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -7915,27 +9373,27 @@ public final class MasterAdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); tableName_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteTableRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest result = buildPartial(); if (!result.isInitialized()) { @@ -7943,17 +9401,7 @@ public final class MasterAdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest(this); int from_bitField0_ = bitField0_; @@ -7966,7 +9414,7 @@ public final class MasterAdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest)other); @@ -7975,7 +9423,7 @@ public 
final class MasterAdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest.getDefaultInstance()) return this; if (other.hasTableName()) { @@ -7984,7 +9432,7 @@ public final class MasterAdminProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasTableName()) { @@ -7992,49 +9440,43 @@ public final class MasterAdminProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - tableName_ = input.readBytes(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required bytes tableName = 1; private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes tableName = 1; + */ public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes tableName = 1; + */ public com.google.protobuf.ByteString getTableName() { return tableName_; } + /** + * required bytes tableName = 1; + */ public Builder setTableName(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -8044,90 +9486,154 @@ public final class MasterAdminProtos { onChanged(); return this; } + /** + * required bytes tableName = 1; + */ public Builder clearTableName() { bitField0_ = (bitField0_ & ~0x00000001); tableName_ = getDefaultInstance().getTableName(); onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:DeleteTableRequest) } - + static { defaultInstance = new DeleteTableRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:DeleteTableRequest) } - + public interface DeleteTableResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code DeleteTableResponse} + */ public static final class DeleteTableResponse extends com.google.protobuf.GeneratedMessage implements DeleteTableResponseOrBuilder { // Use DeleteTableResponse.newBuilder() to construct. 
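(Note: the other recurring rewrite, visible again in the DeleteTableResponse builder below, replaces the hand-rolled tag-reading loop in Builder.mergeFrom(CodedInputStream, ExtensionRegistryLite) with a delegation to PARSER.parsePartialFrom, salvaging whatever was decoded before a failure via InvalidProtocolBufferException.getUnfinishedMessage(). Roughly, the pattern is — again sketched with FileDescriptorProto rather than the HBase types:

import com.google.protobuf.CodedInputStream;
import com.google.protobuf.DescriptorProtos.FileDescriptorProto;
import com.google.protobuf.ExtensionRegistryLite;
import com.google.protobuf.InvalidProtocolBufferException;
import java.io.IOException;

public class MergeDelegationSketch {
  static FileDescriptorProto.Builder mergeInto(
      FileDescriptorProto.Builder builder,
      CodedInputStream input,
      ExtensionRegistryLite registry) throws IOException {
    FileDescriptorProto parsed = null;
    try {
      // One pass over the stream instead of a per-builder tag loop.
      parsed = FileDescriptorProto.PARSER.parsePartialFrom(input, registry);
    } catch (InvalidProtocolBufferException e) {
      // Keep the partially decoded message so the finally-block merge
      // preserves any fields read before the error, then rethrow.
      parsed = (FileDescriptorProto) e.getUnfinishedMessage();
      throw e;
    } finally {
      if (parsed != null) {
        builder.mergeFrom(parsed);
      }
    }
    return builder;
  }
}

The finally-block merge is what makes the generated builders tolerant of truncated input: fields parsed before the error still land in the builder even though the exception propagates.)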
- private DeleteTableResponse(Builder builder) { + private DeleteTableResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private DeleteTableResponse(boolean noInit) {} - + private DeleteTableResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final DeleteTableResponse defaultInstance; public static DeleteTableResponse getDefaultInstance() { return defaultInstance; } - + public DeleteTableResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private DeleteTableResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteTableResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteTableResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteTableResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public DeleteTableResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new DeleteTableResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if 
(size != -1) return size; - + size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -8137,101 +9643,95 @@ public final class MasterAdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse) obj; - + boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse parseDelimitedFrom( java.io.InputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code DeleteTableResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponseOrBuilder { @@ -8239,18 +9739,21 @@ public final class MasterAdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteTableResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteTableResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteTableResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -8261,25 +9764,25 @@ public final class MasterAdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteTableResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse getDefaultInstanceForType() { return
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse result = buildPartial(); if (!result.isInitialized()) { @@ -8287,23 +9790,13 @@ public final class MasterAdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse(this); onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse)other); @@ -8312,102 +9805,171 @@ public final class MasterAdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - - + // @@protoc_insertion_point(builder_scope:DeleteTableResponse) } - + static { defaultInstance = new DeleteTableResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:DeleteTableResponse) } - + public interface EnableTableRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required bytes tableName = 1; + /** + * required bytes tableName = 1; + */ boolean hasTableName(); + /** + * required bytes tableName = 1; + */ com.google.protobuf.ByteString getTableName(); } + /** 
+ * Protobuf type {@code EnableTableRequest} + */ public static final class EnableTableRequest extends com.google.protobuf.GeneratedMessage implements EnableTableRequestOrBuilder { // Use EnableTableRequest.newBuilder() to construct. - private EnableTableRequest(Builder builder) { + private EnableTableRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private EnableTableRequest(boolean noInit) {} - + private EnableTableRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final EnableTableRequest defaultInstance; public static EnableTableRequest getDefaultInstance() { return defaultInstance; } - + public EnableTableRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private EnableTableRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + tableName_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableTableRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableTableRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableTableRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest.Builder.class); + } + + public static com.google.protobuf.Parser<EnableTableRequest> PARSER = + new com.google.protobuf.AbstractParser<EnableTableRequest>() { + public EnableTableRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new EnableTableRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser<EnableTableRequest> getParserForType() { + return PARSER; } - + private int bitField0_; // required bytes tableName = 1; public static final int TABLENAME_FIELD_NUMBER = 1; private com.google.protobuf.ByteString tableName_; + /** + * required bytes
tableName = 1; + */ public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes tableName = 1; + */ public com.google.protobuf.ByteString getTableName() { return tableName_; } - + private void initFields() { tableName_ = com.google.protobuf.ByteString.EMPTY; } @@ -8415,7 +9977,7 @@ public final class MasterAdminProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasTableName()) { memoizedIsInitialized = 0; return false; @@ -8423,7 +9985,7 @@ public final class MasterAdminProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -8432,12 +9994,12 @@ public final class MasterAdminProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -8447,14 +10009,14 @@ public final class MasterAdminProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -8464,7 +10026,7 @@ public final class MasterAdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest) obj; - + boolean result = true; result = result && (hasTableName() == other.hasTableName()); if (hasTableName()) { @@ -8475,9 +10037,13 @@ public final class MasterAdminProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasTableName()) { @@ -8485,89 +10051,79 @@ public final class MasterAdminProtos { hash = (53 * hash) + getTableName().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest parseFrom( 
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code EnableTableRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequestOrBuilder { @@ -8575,18 +10131,21 @@ public final class MasterAdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableTableRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableTableRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -8597,27 +10156,27 @@ public final class MasterAdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); tableName_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableTableRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest result = buildPartial(); if (!result.isInitialized()) { @@ -8625,17 +10184,7 @@ public final class MasterAdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest(this); int from_bitField0_ = bitField0_; @@ -8648,7 +10197,7 @@ public final class MasterAdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest)other); @@ -8657,7 +10206,7 @@ public final class MasterAdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest.getDefaultInstance()) return this; if (other.hasTableName()) { @@ -8666,7 +10215,7 @@ public final class MasterAdminProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasTableName()) { @@ 
-8674,49 +10223,43 @@ public final class MasterAdminProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - tableName_ = input.readBytes(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required bytes tableName = 1; private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes tableName = 1; + */ public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes tableName = 1; + */ public com.google.protobuf.ByteString getTableName() { return tableName_; } + /** + * required bytes tableName = 1; + */ public Builder setTableName(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -8726,90 +10269,154 @@ public final class MasterAdminProtos { onChanged(); return this; } + /** + * required bytes tableName = 1; + */ public Builder clearTableName() { bitField0_ = (bitField0_ & ~0x00000001); tableName_ = getDefaultInstance().getTableName(); onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:EnableTableRequest) } - + static { defaultInstance = new EnableTableRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:EnableTableRequest) } - + public interface EnableTableResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code EnableTableResponse} + */ public static final class EnableTableResponse extends com.google.protobuf.GeneratedMessage implements EnableTableResponseOrBuilder { // Use EnableTableResponse.newBuilder() to construct. 
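The hunks above repeat one migration pattern for every generated message: a static PARSER (an AbstractParser bound to the new CodedInputStream constructor) replaces the newBuilder().mergeFrom(...).buildParsed() chain behind all of the static parseFrom()/parseDelimitedFrom() entry points. Callers keep the same signatures; only where failures surface changes. A minimal sketch of the post-patch calling convention, assuming a regenerated MasterAdminProtos on the classpath (the table name and wrapper class below are made up for illustration, not part of the patch):

import com.google.protobuf.ByteString;
import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest;

public class EnableTableRequestRoundTrip {
  public static void main(String[] args) {
    // Build a request with its single required field set, then serialize it.
    EnableTableRequest request = EnableTableRequest.newBuilder()
        .setTableName(ByteString.copyFromUtf8("exampleTable"))
        .build();
    byte[] wire = request.toByteArray();
    try {
      // Post-patch, parseFrom(byte[]) delegates to PARSER.parseFrom(wire).
      EnableTableRequest parsed = EnableTableRequest.parseFrom(wire);
      System.out.println(parsed.getTableName().toStringUtf8());
      // With no bytes at all, the required tableName stays unset, so the
      // parser itself throws InvalidProtocolBufferException now that the
      // buildParsed() helper is gone.
      EnableTableRequest.parseFrom(new byte[0]);
    } catch (InvalidProtocolBufferException e) {
      System.out.println("rejected: " + e.getMessage());
    }
  }
}

One behavioral detail worth noting: parseDelimitedFrom() previously returned null when the builder's mergeDelimitedFrom() reported end-of-stream, and AbstractParser preserves that contract by returning null when the stream is already exhausted.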
- private EnableTableResponse(Builder builder) { + private EnableTableResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private EnableTableResponse(boolean noInit) {} - + private EnableTableResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final EnableTableResponse defaultInstance; public static EnableTableResponse getDefaultInstance() { return defaultInstance; } - + public EnableTableResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private EnableTableResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableTableResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableTableResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableTableResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse.Builder.class); + } + + public static com.google.protobuf.Parser<EnableTableResponse> PARSER = + new com.google.protobuf.AbstractParser<EnableTableResponse>() { + public EnableTableResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new EnableTableResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser<EnableTableResponse> getParserForType() { + return PARSER; } - + private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size =
(size != -1) return size; - + size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -8819,101 +10426,95 @@ public final class MasterAdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse) obj; - + boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse parseDelimitedFrom( java.io.InputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code EnableTableResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponseOrBuilder { @@ -8921,18 +10522,21 @@ public final class MasterAdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableTableResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableTableResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableTableResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -8943,25 +10547,25 @@ public final class MasterAdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableTableResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse result = buildPartial(); if (!result.isInitialized()) { @@ -8969,23 +10573,13 @@ public final class MasterAdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse(this); onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse)other); @@ -8994,102 +10588,171 @@ public final class MasterAdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - - + // @@protoc_insertion_point(builder_scope:EnableTableResponse) } - + static { defaultInstance = new EnableTableResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:EnableTableResponse) } - + public interface DisableTableRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required bytes tableName = 1; + /** + * required bytes tableName = 1; + */ boolean hasTableName(); + /** + * required bytes tableName = 1; + */ com.google.protobuf.ByteString 
getTableName(); } + /** + * Protobuf type {@code DisableTableRequest} + */ public static final class DisableTableRequest extends com.google.protobuf.GeneratedMessage implements DisableTableRequestOrBuilder { // Use DisableTableRequest.newBuilder() to construct. - private DisableTableRequest(Builder builder) { + private DisableTableRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private DisableTableRequest(boolean noInit) {} - + private DisableTableRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final DisableTableRequest defaultInstance; public static DisableTableRequest getDefaultInstance() { return defaultInstance; } - + public DisableTableRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private DisableTableRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + tableName_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DisableTableRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DisableTableRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DisableTableRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest.Builder.class); + } + + public static com.google.protobuf.Parser<DisableTableRequest> PARSER = + new com.google.protobuf.AbstractParser<DisableTableRequest>() { + public DisableTableRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new DisableTableRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser<DisableTableRequest> getParserForType() { + return PARSER; } - + private int bitField0_; // required bytes tableName = 1; public static final int TABLENAME_FIELD_NUMBER = 1; private com.google.protobuf.ByteString
tableName_; + /** + * required bytes tableName = 1; + */ public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes tableName = 1; + */ public com.google.protobuf.ByteString getTableName() { return tableName_; } - + private void initFields() { tableName_ = com.google.protobuf.ByteString.EMPTY; } @@ -9097,7 +10760,7 @@ public final class MasterAdminProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasTableName()) { memoizedIsInitialized = 0; return false; @@ -9105,7 +10768,7 @@ public final class MasterAdminProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -9114,12 +10777,12 @@ public final class MasterAdminProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -9129,14 +10792,14 @@ public final class MasterAdminProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -9146,7 +10809,7 @@ public final class MasterAdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest) obj; - + boolean result = true; result = result && (hasTableName() == other.hasTableName()); if (hasTableName()) { @@ -9157,9 +10820,13 @@ public final class MasterAdminProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasTableName()) { @@ -9167,89 +10834,79 @@ public final class MasterAdminProtos { hash = (53 * hash) + getTableName().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code DisableTableRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequestOrBuilder { @@ -9257,18 +10914,21 @@ public final class MasterAdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DisableTableRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DisableTableRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DisableTableRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -9279,27 +10939,27 @@ public final class MasterAdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); tableName_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DisableTableRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest result = buildPartial(); if (!result.isInitialized()) { @@ -9307,17 +10967,7 @@ public final class MasterAdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest(this); int from_bitField0_ = bitField0_; @@ -9330,7 +10980,7 @@ public final class MasterAdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest)other); @@ -9339,7 +10989,7 @@ public final class MasterAdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest.getDefaultInstance()) return this; if (other.hasTableName()) { @@ -9348,7 +10998,7 @@ public final class 
MasterAdminProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasTableName()) { @@ -9356,49 +11006,43 @@ public final class MasterAdminProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - tableName_ = input.readBytes(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required bytes tableName = 1; private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes tableName = 1; + */ public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes tableName = 1; + */ public com.google.protobuf.ByteString getTableName() { return tableName_; } + /** + * required bytes tableName = 1; + */ public Builder setTableName(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -9408,90 +11052,154 @@ public final class MasterAdminProtos { onChanged(); return this; } + /** + * required bytes tableName = 1; + */ public Builder clearTableName() { bitField0_ = (bitField0_ & ~0x00000001); tableName_ = getDefaultInstance().getTableName(); onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:DisableTableRequest) } - + static { defaultInstance = new DisableTableRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:DisableTableRequest) } - + public interface DisableTableResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code DisableTableResponse} + */ public static final class DisableTableResponse extends com.google.protobuf.GeneratedMessage implements DisableTableResponseOrBuilder { // Use DisableTableResponse.newBuilder() to construct. 
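Two smaller changes ride along with the parser rewrite in each of these messages: hashCode() is now cached in the new memoizedHashCode field (safe because the messages are immutable once built), and every message overrides getParserForType() to return its PARSER. The override is what lets generic code parse any of these request/response types given only a prototype instance. A hedged sketch of that use, with an invented helper class and method name that are not part of the patch:

import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.Message;

public final class PrototypeParsing {
  private PrototypeParsing() {}

  // Parses 'wire' with the parser attached to 'prototype', e.g.
  // DisableTableRequest.getDefaultInstance(). The unchecked cast is safe at
  // runtime because each generated getParserForType() returns a parser that
  // produces its own message type.
  @SuppressWarnings("unchecked")
  static <M extends Message> M parseWith(M prototype, byte[] wire)
      throws InvalidProtocolBufferException {
    return (M) prototype.getParserForType().parseFrom(wire);
  }
}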
- private DisableTableResponse(Builder builder) { + private DisableTableResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private DisableTableResponse(boolean noInit) {} - + private DisableTableResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final DisableTableResponse defaultInstance; public static DisableTableResponse getDefaultInstance() { return defaultInstance; } - + public DisableTableResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private DisableTableResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DisableTableResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DisableTableResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DisableTableResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse.Builder.class); + } + + public static com.google.protobuf.Parser<DisableTableResponse> PARSER = + new com.google.protobuf.AbstractParser<DisableTableResponse>() { + public DisableTableResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new DisableTableResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser<DisableTableResponse> getParserForType() { + return PARSER; } - + private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size =
memoizedSerializedSize; if (size != -1) return size; - + size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -9501,101 +11209,95 @@ public final class MasterAdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse) obj; - + boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse parseDelimitedFrom( 
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code DisableTableResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponseOrBuilder { @@ -9603,18 +11305,21 @@ public final class MasterAdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DisableTableResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DisableTableResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DisableTableResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -9625,25 +11330,25 @@ public final class MasterAdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DisableTableResponse_descriptor; } - + public 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse result = buildPartial(); if (!result.isInitialized()) { @@ -9651,23 +11356,13 @@ public final class MasterAdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse(this); onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse)other); @@ -9676,120 +11371,220 @@ public final class MasterAdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - - + // @@protoc_insertion_point(builder_scope:DisableTableResponse) } - + static { defaultInstance = new DisableTableResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:DisableTableResponse) } - + public interface ModifyTableRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required bytes tableName = 1; + /** + * required bytes 
tableName = 1; + */ boolean hasTableName(); + /** + * required bytes tableName = 1; + */ com.google.protobuf.ByteString getTableName(); - + // required .TableSchema tableSchema = 2; + /** + * required .TableSchema tableSchema = 2; + */ boolean hasTableSchema(); + /** + * required .TableSchema tableSchema = 2; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema(); + /** + * required .TableSchema tableSchema = 2; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder(); } + /** + * Protobuf type {@code ModifyTableRequest} + */ public static final class ModifyTableRequest extends com.google.protobuf.GeneratedMessage implements ModifyTableRequestOrBuilder { // Use ModifyTableRequest.newBuilder() to construct. - private ModifyTableRequest(Builder builder) { + private ModifyTableRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private ModifyTableRequest(boolean noInit) {} - + private ModifyTableRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final ModifyTableRequest defaultInstance; public static ModifyTableRequest getDefaultInstance() { return defaultInstance; } - + public ModifyTableRequest getDefaultInstanceForType() { return defaultInstance; } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyTableRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyTableRequest_fieldAccessorTable; - } - - private int bitField0_; - // required bytes tableName = 1; - public static final int TABLENAME_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString tableName_; - public boolean hasTableName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getTableName() { - return tableName_; - } - - // required .TableSchema tableSchema = 2; - public static final int TABLESCHEMA_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema tableSchema_; - public boolean hasTableSchema() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema() { - return tableSchema_; + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder() { - return tableSchema_; + private ModifyTableRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + 
bitField0_ |= 0x00000001; + tableName_ = input.readBytes(); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder subBuilder = null; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + subBuilder = tableSchema_.toBuilder(); + } + tableSchema_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(tableSchema_); + tableSchema_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000002; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyTableRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyTableRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public ModifyTableRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ModifyTableRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // required bytes tableName = 1; + public static final int TABLENAME_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString tableName_; + /** + * required bytes tableName = 1; + */ + public boolean hasTableName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required bytes tableName = 1; + */ + public com.google.protobuf.ByteString getTableName() { + return tableName_; + } + + // required .TableSchema tableSchema = 2; + public static final int TABLESCHEMA_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema tableSchema_; + /** + * required .TableSchema tableSchema = 2; + */ + public boolean hasTableSchema() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * required .TableSchema tableSchema = 2; + */ + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema() { + return tableSchema_; + } + /** + * required .TableSchema tableSchema = 2; + */ + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder() { + return tableSchema_; } - + private void initFields() { tableName_ = com.google.protobuf.ByteString.EMPTY; tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); @@ -9798,7 +11593,7 @@ public final class MasterAdminProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized 
!= -1) return isInitialized == 1; - + if (!hasTableName()) { memoizedIsInitialized = 0; return false; @@ -9814,7 +11609,7 @@ public final class MasterAdminProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -9826,12 +11621,12 @@ public final class MasterAdminProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -9845,14 +11640,14 @@ public final class MasterAdminProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -9862,7 +11657,7 @@ public final class MasterAdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest) obj; - + boolean result = true; result = result && (hasTableName() == other.hasTableName()); if (hasTableName()) { @@ -9878,9 +11673,13 @@ public final class MasterAdminProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasTableName()) { @@ -9892,89 +11691,79 @@ public final class MasterAdminProtos { hash = (53 * hash) + getTableSchema().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return 
newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code ModifyTableRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequestOrBuilder { @@ -9982,18 +11771,21 @@ public final class MasterAdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyTableRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyTableRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyTableRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest.newBuilder() private 
Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -10005,7 +11797,7 @@ public final class MasterAdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); tableName_ = com.google.protobuf.ByteString.EMPTY; @@ -10018,20 +11810,20 @@ public final class MasterAdminProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyTableRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest result = buildPartial(); if (!result.isInitialized()) { @@ -10039,17 +11831,7 @@ public final class MasterAdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest(this); int from_bitField0_ = bitField0_; @@ -10070,7 +11852,7 @@ public final class MasterAdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest)other); @@ -10079,7 +11861,7 @@ public final class MasterAdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest.getDefaultInstance()) return this; if (other.hasTableName()) { @@ -10091,7 +11873,7 @@ public final class MasterAdminProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasTableName()) { @@ -10107,58 +11889,43 @@ public final class MasterAdminProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while 
(true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - tableName_ = input.readBytes(); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.newBuilder(); - if (hasTableSchema()) { - subBuilder.mergeFrom(getTableSchema()); - } - input.readMessage(subBuilder, extensionRegistry); - setTableSchema(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required bytes tableName = 1; private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes tableName = 1; + */ public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes tableName = 1; + */ public com.google.protobuf.ByteString getTableName() { return tableName_; } + /** + * required bytes tableName = 1; + */ public Builder setTableName(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -10168,20 +11935,29 @@ public final class MasterAdminProtos { onChanged(); return this; } + /** + * required bytes tableName = 1; + */ public Builder clearTableName() { bitField0_ = (bitField0_ & ~0x00000001); tableName_ = getDefaultInstance().getTableName(); onChanged(); return this; } - + // required .TableSchema tableSchema = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> tableSchemaBuilder_; + /** + * required .TableSchema tableSchema = 2; + */ public boolean hasTableSchema() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required .TableSchema tableSchema = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema() { if (tableSchemaBuilder_ == null) { return tableSchema_; @@ -10189,6 +11965,9 @@ public final class MasterAdminProtos { return tableSchemaBuilder_.getMessage(); } } + /** + * required .TableSchema tableSchema = 2; + */ public Builder setTableSchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) { if (tableSchemaBuilder_ == null) { if (value == null) { @@ -10202,6 +11981,9 @@ public final class MasterAdminProtos { bitField0_ |= 0x00000002; return this; } + /** + * required .TableSchema tableSchema = 2; + */ public Builder setTableSchema( 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder builderForValue) { if (tableSchemaBuilder_ == null) { @@ -10213,6 +11995,9 @@ public final class MasterAdminProtos { bitField0_ |= 0x00000002; return this; } + /** + * required .TableSchema tableSchema = 2; + */ public Builder mergeTableSchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) { if (tableSchemaBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && @@ -10229,6 +12014,9 @@ public final class MasterAdminProtos { bitField0_ |= 0x00000002; return this; } + /** + * required .TableSchema tableSchema = 2; + */ public Builder clearTableSchema() { if (tableSchemaBuilder_ == null) { tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); @@ -10239,11 +12027,17 @@ public final class MasterAdminProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } + /** + * required .TableSchema tableSchema = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder getTableSchemaBuilder() { bitField0_ |= 0x00000002; onChanged(); return getTableSchemaFieldBuilder().getBuilder(); } + /** + * required .TableSchema tableSchema = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder() { if (tableSchemaBuilder_ != null) { return tableSchemaBuilder_.getMessageOrBuilder(); @@ -10251,6 +12045,9 @@ public final class MasterAdminProtos { return tableSchema_; } } + /** + * required .TableSchema tableSchema = 2; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> getTableSchemaFieldBuilder() { @@ -10264,84 +12061,145 @@ public final class MasterAdminProtos { } return tableSchemaBuilder_; } - + // @@protoc_insertion_point(builder_scope:ModifyTableRequest) } - + static { defaultInstance = new ModifyTableRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:ModifyTableRequest) } - + public interface ModifyTableResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code ModifyTableResponse} + */ public static final class ModifyTableResponse extends com.google.protobuf.GeneratedMessage implements ModifyTableResponseOrBuilder { // Use ModifyTableResponse.newBuilder() to construct. 
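[reviewer note] ModifyTableRequest is the first message in this stretch with a required submessage: its new parsing constructor reads the nested TableSchema with TableSchema.PARSER and, if the field was already set, merges through a toBuilder() copy, instead of the removed builder-side readMessage path. A hedged sketch of what the required-field checks in isInitialized() mean for callers; it assumes TableSchema itself has no required fields, so its default instance passes initialization:

  import com.google.protobuf.ByteString;
  import com.google.protobuf.UninitializedMessageException;
  import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema;
  import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest;

  public class ModifyTableRequestSketch {
    public static void main(String[] args) throws Exception {
      ModifyTableRequest.Builder b = ModifyTableRequest.newBuilder()
          .setTableName(ByteString.copyFromUtf8("t1"));  // hypothetical table
      try {
        // tableSchema is "required .TableSchema tableSchema = 2", so this must fail.
        b.build();
      } catch (UninitializedMessageException expected) {
        // isInitialized() returned false: the required tableSchema is unset.
      }
      ModifyTableRequest req =
          b.setTableSchema(TableSchema.getDefaultInstance()).build();

      // Round-trip through the new PARSER; the parsing constructor reads the
      // nested TableSchema via TableSchema.PARSER as shown in the hunk above.
      ModifyTableRequest parsed =
          ModifyTableRequest.PARSER.parseFrom(req.toByteArray());
      assert parsed.hasTableSchema();
    }
  }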
- private ModifyTableResponse(Builder builder) { + private ModifyTableResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private ModifyTableResponse(boolean noInit) {} - + private ModifyTableResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final ModifyTableResponse defaultInstance; public static ModifyTableResponse getDefaultInstance() { return defaultInstance; } - + public ModifyTableResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ModifyTableResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyTableResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyTableResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyTableResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public ModifyTableResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ModifyTableResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if 
(size != -1) return size; - + size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -10351,101 +12209,95 @@ public final class MasterAdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse) obj; - + boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse parseDelimitedFrom( java.io.InputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code ModifyTableResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponseOrBuilder { @@ -10453,18 +12305,21 @@ public final class MasterAdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyTableResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyTableResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyTableResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -10475,25 +12330,25 @@ public final class MasterAdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyTableResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse getDefaultInstanceForType() { 
return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse result = buildPartial(); if (!result.isInitialized()) { @@ -10501,23 +12356,13 @@ public final class MasterAdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse(this); onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse)other); @@ -10526,122 +12371,173 @@ public final class MasterAdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - - + // @@protoc_insertion_point(builder_scope:ModifyTableResponse) } - + static { defaultInstance = new ModifyTableResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:ModifyTableResponse) } - + public interface ShutdownRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code ShutdownRequest} + */ public static final class ShutdownRequest extends com.google.protobuf.GeneratedMessage implements ShutdownRequestOrBuilder { // Use 
ShutdownRequest.newBuilder() to construct. - private ShutdownRequest(Builder builder) { + private ShutdownRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private ShutdownRequest(boolean noInit) {} - + private ShutdownRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final ShutdownRequest defaultInstance; public static ShutdownRequest getDefaultInstance() { return defaultInstance; } - + public ShutdownRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ShutdownRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ShutdownRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ShutdownRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ShutdownRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public ShutdownRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ShutdownRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return 
size; - + size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -10651,101 +12547,95 @@ public final class MasterAdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest) obj; - + boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code ShutdownRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequestOrBuilder { @@ -10753,18 +12643,21 @@ public final class MasterAdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ShutdownRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ShutdownRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ShutdownRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -10775,25 +12668,25 @@ public final class MasterAdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ShutdownRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest.getDefaultInstance(); } - + public 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest result = buildPartial(); if (!result.isInitialized()) { @@ -10801,23 +12694,13 @@ public final class MasterAdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest(this); onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest)other); @@ -10826,122 +12709,173 @@ public final class MasterAdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - - + // @@protoc_insertion_point(builder_scope:ShutdownRequest) } - + static { defaultInstance = new ShutdownRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:ShutdownRequest) } - + public interface ShutdownResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code ShutdownResponse} + */ public static final class ShutdownResponse extends com.google.protobuf.GeneratedMessage implements ShutdownResponseOrBuilder { // Use ShutdownResponse.newBuilder() to construct. 
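// ---------------------------------------------------------------------------
// Two things happen in the Builder above: buildParsed() is deleted (its only
// callers were the static parse methods, which now go through PARSER), and
// mergeFrom(CodedInputStream, ...) delegates to PARSER.parsePartialFrom
// instead of running its own tag loop. The error handling is worth noting:
// InvalidProtocolBufferException in protobuf 2.5 carries the partially-parsed
// message via getUnfinishedMessage(), and the finally block merges it into
// the builder even when the exception is rethrown, so a caller that catches
// the exception still finds the successfully-read fields in the builder.
// ---------------------------------------------------------------------------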
- private ShutdownResponse(Builder builder) { + private ShutdownResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private ShutdownResponse(boolean noInit) {} - + private ShutdownResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final ShutdownResponse defaultInstance; public static ShutdownResponse getDefaultInstance() { return defaultInstance; } - + public ShutdownResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ShutdownResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ShutdownResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ShutdownResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ShutdownResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse.Builder.class); } - + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public ShutdownResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ShutdownResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; size += 
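// ---------------------------------------------------------------------------
// ShutdownResponse now parses eagerly in a private constructor that consumes
// a CodedInputStream, and PARSER is an AbstractParser whose parsePartialFrom
// simply invokes that constructor. AbstractParser then derives every other
// parseFrom/parseDelimitedFrom variant from this one method, which is why
// the static helpers shrink to one-liners. Sketch of the pattern on a
// hypothetical message type M (protobuf 2.5 API):
//
//   public static com.google.protobuf.Parser<M> PARSER =
//       new com.google.protobuf.AbstractParser<M>() {
//         public M parsePartialFrom(
//             com.google.protobuf.CodedInputStream input,
//             com.google.protobuf.ExtensionRegistryLite registry)
//             throws com.google.protobuf.InvalidProtocolBufferException {
//           return new M(input, registry);   // all parsing work is in the ctor
//         }
//       };
//
// For a message with no declared fields, the constructor's switch only has
// to distinguish tag 0 (end of stream) from unknown fields to preserve.
// ---------------------------------------------------------------------------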
getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -10951,101 +12885,95 @@ public final class MasterAdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse) obj; - + boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - 
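// ---------------------------------------------------------------------------
// parseDelimitedFrom keeps its old contract: each message on the stream is
// prefixed with a varint length, and null is returned at clean end-of-stream
// (the case the old code signalled by mergeDelimitedFrom returning false).
// A self-contained illustration; hypothetical usage only:
final class DelimitedStreamSketch {
  static int countMessages() throws java.io.IOException {
    java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
    org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse
        .getDefaultInstance().writeDelimitedTo(out);   // length-prefixed write
    org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse
        .getDefaultInstance().writeDelimitedTo(out);
    java.io.ByteArrayInputStream in =
        new java.io.ByteArrayInputStream(out.toByteArray());
    int n = 0;
    while (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos
        .ShutdownResponse.parseDelimitedFrom(in) != null) {  // null == EOF
      n++;
    }
    return n;  // 2
  }
}
// ---------------------------------------------------------------------------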
Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code ShutdownResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponseOrBuilder { @@ -11053,18 +12981,21 @@ public final class MasterAdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ShutdownResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ShutdownResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ShutdownResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -11075,25 +13006,25 @@ public final class MasterAdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ShutdownResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse.getDefaultInstance(); } - + public 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse result = buildPartial(); if (!result.isInitialized()) { @@ -11101,23 +13032,13 @@ public final class MasterAdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse(this); onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse)other); @@ -11126,122 +13047,173 @@ public final class MasterAdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - - + // @@protoc_insertion_point(builder_scope:ShutdownResponse) } - + static { defaultInstance = new ShutdownResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:ShutdownResponse) } - + public interface StopMasterRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code StopMasterRequest} + */ public static final class StopMasterRequest extends com.google.protobuf.GeneratedMessage implements StopMasterRequestOrBuilder { // Use StopMasterRequest.newBuilder() to construct. 
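// ---------------------------------------------------------------------------
// internalGetFieldAccessorTable (in both the message and its Builder) now
// routes through ensureFieldAccessorsInitialized(messageClass, builderClass).
// In protobuf 2.5 the reflective field-accessor table is filled in lazily on
// first use rather than when the file descriptor loads, which trims class
// initialization cost for files with many message types. Call shape only;
// the table itself is built by the protobuf runtime:
//
//   protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
//       internalGetFieldAccessorTable() {
//     return internal_static_StopMasterRequest_fieldAccessorTable
//         .ensureFieldAccessorsInitialized(
//             StopMasterRequest.class, StopMasterRequest.Builder.class);
//   }
// ---------------------------------------------------------------------------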
- private StopMasterRequest(Builder builder) { + private StopMasterRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private StopMasterRequest(boolean noInit) {} - + private StopMasterRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final StopMasterRequest defaultInstance; public static StopMasterRequest getDefaultInstance() { return defaultInstance; } - + public StopMasterRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private StopMasterRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_StopMasterRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_StopMasterRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_StopMasterRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public StopMasterRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new StopMasterRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size 
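// ---------------------------------------------------------------------------
// unknownFields is now a final per-message field populated by the parsing
// constructor, with getUnknownFields() overridden to return it; because the
// field is final, the builder constructor and the noInit constructor must
// assign it too (the latter pins it to UnknownFieldSet.getDefaultInstance()).
// Preserving unknown fields is what lets an older schema relay bytes written
// by a newer one. A hypothetical demonstration, abusing BalanceResponse as
// the "newer" message since StopMasterRequest declares no fields at all:
final class UnknownFieldSketch {
  static boolean roundTripPreservesUnknowns()
      throws com.google.protobuf.InvalidProtocolBufferException {
    byte[] wire = org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos
        .BalanceResponse.newBuilder().setBalancerRan(true).build().toByteArray();
    // Field 1 is unknown to StopMasterRequest, so it lands in unknownFields.
    org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest
        parsed = org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos
            .StopMasterRequest.parseFrom(wire);
    return java.util.Arrays.equals(wire, parsed.toByteArray()); // true
  }
}
// ---------------------------------------------------------------------------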
= 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -11251,101 +13223,95 @@ public final class MasterAdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest) obj; - + boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code StopMasterRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequestOrBuilder { @@ -11353,18 +13319,21 @@ public final class MasterAdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_StopMasterRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_StopMasterRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_StopMasterRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -11375,25 +13344,25 @@ public final class MasterAdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_StopMasterRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest getDefaultInstanceForType() { return 
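// ---------------------------------------------------------------------------
// One subtle no-op in the Builder above: getDescriptorForType() now returns
// the internal_static_StopMasterRequest_descriptor field directly instead of
// calling StopMasterRequest.getDescriptor(). Both resolve to the same
// Descriptor instance; the direct field read just skips one static hop.
// ---------------------------------------------------------------------------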
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest result = buildPartial(); if (!result.isInitialized()) { @@ -11401,23 +13370,13 @@ public final class MasterAdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest(this); onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest)other); @@ -11426,122 +13385,173 @@ public final class MasterAdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - - + // @@protoc_insertion_point(builder_scope:StopMasterRequest) } - + static { defaultInstance = new StopMasterRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:StopMasterRequest) } - + public interface StopMasterResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code StopMasterResponse} + */ public static final class StopMasterResponse extends com.google.protobuf.GeneratedMessage implements StopMasterResponseOrBuilder { // Use StopMasterResponse.newBuilder() to 
construct. - private StopMasterResponse(Builder builder) { + private StopMasterResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private StopMasterResponse(boolean noInit) {} - + private StopMasterResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final StopMasterResponse defaultInstance; public static StopMasterResponse getDefaultInstance() { return defaultInstance; } - + public StopMasterResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private StopMasterResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_StopMasterResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_StopMasterResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_StopMasterResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public StopMasterResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new StopMasterResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != 
-1) return size; - + size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -11551,101 +13561,95 @@ public final class MasterAdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse) obj; - + boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse parseDelimitedFrom( java.io.InputStream input, 
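// ---------------------------------------------------------------------------
// getSerializedSize() uses the same caching trick as hashCode() but with -1
// as the "unset" sentinel, since 0 is a legitimate size for these empty
// messages. The idiom, in the same shape as the generated method:
//
//   private int memoizedSerializedSize = -1;
//   public int getSerializedSize() {
//     int size = memoizedSerializedSize;
//     if (size != -1) return size;                        // cached
//     size = getUnknownFields().getSerializedSize();      // empty message:
//     memoizedSerializedSize = size;                      // only unknowns
//     return size;
//   }
//
// writeTo() deliberately calls getSerializedSize() before writing so that
// any nested length prefixes are already memoized before bytes are emitted.
// ---------------------------------------------------------------------------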
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code StopMasterResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponseOrBuilder { @@ -11653,18 +13657,21 @@ public final class MasterAdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_StopMasterResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_StopMasterResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_StopMasterResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -11675,25 +13682,25 @@ public final class MasterAdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_StopMasterResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse getDefaultInstanceForType() { return 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse result = buildPartial(); if (!result.isInitialized()) { @@ -11701,23 +13708,13 @@ public final class MasterAdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse(this); onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse)other); @@ -11726,122 +13723,173 @@ public final class MasterAdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - - + // @@protoc_insertion_point(builder_scope:StopMasterResponse) } - + static { defaultInstance = new StopMasterResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:StopMasterResponse) } - + public interface BalanceRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code BalanceRequest} + */ public static final class BalanceRequest extends com.google.protobuf.GeneratedMessage implements BalanceRequestOrBuilder { // Use BalanceRequest.newBuilder() to 
construct. - private BalanceRequest(Builder builder) { + private BalanceRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private BalanceRequest(boolean noInit) {} - + private BalanceRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final BalanceRequest defaultInstance; public static BalanceRequest getDefaultInstance() { return defaultInstance; } - + public BalanceRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private BalanceRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_BalanceRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_BalanceRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_BalanceRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest.Builder.class); } - + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public BalanceRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new BalanceRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; size += 
getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -11851,101 +13899,95 @@ public final class MasterAdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest) obj; - + boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = 
newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code BalanceRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequestOrBuilder { @@ -11953,18 +13995,21 @@ public final class MasterAdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_BalanceRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_BalanceRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_BalanceRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -11975,25 +14020,25 @@ public final class MasterAdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_BalanceRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest.getDefaultInstance(); } - + public 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest result = buildPartial(); if (!result.isInitialized()) { @@ -12001,23 +14046,13 @@ public final class MasterAdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest(this); onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest)other); @@ -12026,102 +14061,171 @@ public final class MasterAdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - - + // @@protoc_insertion_point(builder_scope:BalanceRequest) } - + static { defaultInstance = new BalanceRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:BalanceRequest) } - + public interface BalanceResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required bool balancerRan = 1; + /** + * required bool balancerRan = 1; + */ boolean hasBalancerRan(); + /** + * required bool balancerRan = 1; + */ boolean getBalancerRan(); } + /** + * Protobuf type {@code BalanceResponse} + */ public static final class BalanceResponse extends com.google.protobuf.GeneratedMessage implements BalanceResponseOrBuilder { // Use BalanceResponse.newBuilder() to 
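// ---------------------------------------------------------------------------
// BalanceResponse is the first message in this stretch with an actual field,
// so its OrBuilder interface gains a has/get pair and the new doc comments
// quote the field declaration. The underlying .proto (MasterAdmin.proto in
// the HBase source tree) is simply:
//
//   message BalanceResponse {
//     required bool balancerRan = 1;
//   }
//
// hasBalancerRan() reports wire presence and getBalancerRan() the value
// (false when unset); because the field is required, isInitialized() and
// build() reject an instance where it was never set.
// ---------------------------------------------------------------------------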
construct. - private BalanceResponse(Builder builder) { + private BalanceResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private BalanceResponse(boolean noInit) {} - + private BalanceResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final BalanceResponse defaultInstance; public static BalanceResponse getDefaultInstance() { return defaultInstance; } - + public BalanceResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private BalanceResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + balancerRan_ = input.readBool(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_BalanceResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_BalanceResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_BalanceResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse.Builder.class); } - + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public BalanceResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new BalanceResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + private int bitField0_; // required bool balancerRan = 1; public static final int BALANCERRAN_FIELD_NUMBER = 1; private boolean balancerRan_; + /** + * required bool balancerRan = 1; + */ public boolean hasBalancerRan() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bool balancerRan = 1; + */ public boolean getBalancerRan() { return balancerRan_; } - + private void initFields() { balancerRan_ = false; } @@ -12129,7 +14233,7 @@ 
public final class MasterAdminProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasBalancerRan()) { memoizedIsInitialized = 0; return false; @@ -12137,7 +14241,7 @@ public final class MasterAdminProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -12146,12 +14250,12 @@ public final class MasterAdminProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -12161,14 +14265,14 @@ public final class MasterAdminProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -12178,7 +14282,7 @@ public final class MasterAdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse) obj; - + boolean result = true; result = result && (hasBalancerRan() == other.hasBalancerRan()); if (hasBalancerRan()) { @@ -12189,9 +14293,13 @@ public final class MasterAdminProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasBalancerRan()) { @@ -12199,89 +14307,79 @@ public final class MasterAdminProtos { hash = (53 * hash) + hashBoolean(getBalancerRan()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code BalanceResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponseOrBuilder { @@ -12289,18 +14387,21 @@ public final class MasterAdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_BalanceResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_BalanceResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_BalanceResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse.Builder.class); } - + // 
Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -12311,27 +14412,27 @@ public final class MasterAdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); balancerRan_ = false; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_BalanceResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse result = buildPartial(); if (!result.isInitialized()) { @@ -12339,17 +14440,7 @@ public final class MasterAdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse(this); int from_bitField0_ = bitField0_; @@ -12362,7 +14453,7 @@ public final class MasterAdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse)other); @@ -12371,7 +14462,7 @@ public final class MasterAdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse.getDefaultInstance()) return this; if (other.hasBalancerRan()) { @@ -12380,7 +14471,7 @@ public final class MasterAdminProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasBalancerRan()) { @@ -12388,133 +14479,226 @@ public final class MasterAdminProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = 
input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - balancerRan_ = input.readBool(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required bool balancerRan = 1; private boolean balancerRan_ ; + /** + * required bool balancerRan = 1; + */ public boolean hasBalancerRan() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bool balancerRan = 1; + */ public boolean getBalancerRan() { return balancerRan_; } + /** + * required bool balancerRan = 1; + */ public Builder setBalancerRan(boolean value) { bitField0_ |= 0x00000001; balancerRan_ = value; onChanged(); return this; } + /** + * required bool balancerRan = 1; + */ public Builder clearBalancerRan() { bitField0_ = (bitField0_ & ~0x00000001); balancerRan_ = false; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:BalanceResponse) } - + static { defaultInstance = new BalanceResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:BalanceResponse) } - + public interface SetBalancerRunningRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required bool on = 1; + /** + * required bool on = 1; + */ boolean hasOn(); + /** + * required bool on = 1; + */ boolean getOn(); - + // optional bool synchronous = 2; + /** + * optional bool synchronous = 2; + */ boolean hasSynchronous(); + /** + * optional bool synchronous = 2; + */ boolean getSynchronous(); } + /** + * Protobuf type {@code SetBalancerRunningRequest} + */ public static final class SetBalancerRunningRequest extends com.google.protobuf.GeneratedMessage implements SetBalancerRunningRequestOrBuilder { // Use SetBalancerRunningRequest.newBuilder() to construct. 
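// Illustrative usage sketch, not part of the regenerated file: the BalanceResponse
// changes above replace the protobuf-2.4 idiom newBuilder().mergeFrom(data).buildParsed()
// with a static PARSER field, the pattern protoc emits for protobuf-java 2.5.x (assumed
// here). A caller-side round trip under that assumption:
//
//   BalanceResponse original = BalanceResponse.newBuilder()
//       .setBalancerRan(true)   // required field, so build() would throw without it
//       .build();
//   byte[] wire = original.toByteArray();
//   BalanceResponse reparsed = BalanceResponse.PARSER.parseFrom(wire);
//   assert reparsed.getBalancerRan();
//
// The public static parseFrom(...) overloads keep their old signatures and merely
// delegate to PARSER, so existing callers compile unchanged.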
- private SetBalancerRunningRequest(Builder builder) { + private SetBalancerRunningRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private SetBalancerRunningRequest(boolean noInit) {} - + private SetBalancerRunningRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final SetBalancerRunningRequest defaultInstance; public static SetBalancerRunningRequest getDefaultInstance() { return defaultInstance; } - + public SetBalancerRunningRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private SetBalancerRunningRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + on_ = input.readBool(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + synchronous_ = input.readBool(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_SetBalancerRunningRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_SetBalancerRunningRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_SetBalancerRunningRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public SetBalancerRunningRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new SetBalancerRunningRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required bool on = 1; public static final int ON_FIELD_NUMBER = 1; private boolean on_; + /** + * required bool on = 1; + */ public boolean hasOn() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * 
required bool on = 1; + */ public boolean getOn() { return on_; } - + // optional bool synchronous = 2; public static final int SYNCHRONOUS_FIELD_NUMBER = 2; private boolean synchronous_; + /** + * optional bool synchronous = 2; + */ public boolean hasSynchronous() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional bool synchronous = 2; + */ public boolean getSynchronous() { return synchronous_; } - + private void initFields() { on_ = false; synchronous_ = false; @@ -12523,7 +14707,7 @@ public final class MasterAdminProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasOn()) { memoizedIsInitialized = 0; return false; @@ -12531,7 +14715,7 @@ public final class MasterAdminProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -12543,12 +14727,12 @@ public final class MasterAdminProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -12562,14 +14746,14 @@ public final class MasterAdminProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -12579,7 +14763,7 @@ public final class MasterAdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest) obj; - + boolean result = true; result = result && (hasOn() == other.hasOn()); if (hasOn()) { @@ -12595,9 +14779,13 @@ public final class MasterAdminProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasOn()) { @@ -12609,89 +14797,79 @@ public final class MasterAdminProtos { hash = (53 * hash) + hashBoolean(getSynchronous()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - 
return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code SetBalancerRunningRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequestOrBuilder { @@ -12699,18 +14877,21 @@ public final class MasterAdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_SetBalancerRunningRequest_descriptor; 
} - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_SetBalancerRunningRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_SetBalancerRunningRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -12721,7 +14902,7 @@ public final class MasterAdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); on_ = false; @@ -12730,20 +14911,20 @@ public final class MasterAdminProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_SetBalancerRunningRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest result = buildPartial(); if (!result.isInitialized()) { @@ -12751,17 +14932,7 @@ public final class MasterAdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest(this); int from_bitField0_ = bitField0_; @@ -12778,7 +14949,7 @@ public final class MasterAdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest)other); @@ -12787,7 +14958,7 @@ public final class MasterAdminProtos { return this; } } - + public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest.getDefaultInstance()) return this; if (other.hasOn()) { @@ -12799,7 +14970,7 @@ public final class MasterAdminProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasOn()) { @@ -12807,145 +14978,228 @@ public final class MasterAdminProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - on_ = input.readBool(); - break; - } - case 16: { - bitField0_ |= 0x00000002; - synchronous_ = input.readBool(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required bool on = 1; private boolean on_ ; + /** + * required bool on = 1; + */ public boolean hasOn() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bool on = 1; + */ public boolean getOn() { return on_; } + /** + * required bool on = 1; + */ public Builder setOn(boolean value) { bitField0_ |= 0x00000001; on_ = value; onChanged(); return this; } + /** + * required bool on = 1; + */ public Builder clearOn() { bitField0_ = (bitField0_ & ~0x00000001); on_ = false; onChanged(); return this; } - + // optional bool synchronous = 2; private boolean synchronous_ ; + /** + * optional bool synchronous = 2; + */ public boolean hasSynchronous() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional bool synchronous = 2; + */ public boolean getSynchronous() { return synchronous_; } + /** + * optional bool synchronous = 2; + */ public Builder setSynchronous(boolean value) { bitField0_ |= 0x00000002; synchronous_ = value; onChanged(); return this; } + /** + * optional bool synchronous = 2; + */ public Builder clearSynchronous() { bitField0_ = (bitField0_ & ~0x00000002); synchronous_ = false; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:SetBalancerRunningRequest) } - + static { defaultInstance = new SetBalancerRunningRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:SetBalancerRunningRequest) } - + public interface SetBalancerRunningResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // optional bool prevBalanceValue = 1; + /** + * optional bool prevBalanceValue = 1; + */ boolean hasPrevBalanceValue(); + /** + * optional bool prevBalanceValue = 
1; + */ boolean getPrevBalanceValue(); } + /** + * Protobuf type {@code SetBalancerRunningResponse} + */ public static final class SetBalancerRunningResponse extends com.google.protobuf.GeneratedMessage implements SetBalancerRunningResponseOrBuilder { // Use SetBalancerRunningResponse.newBuilder() to construct. - private SetBalancerRunningResponse(Builder builder) { + private SetBalancerRunningResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private SetBalancerRunningResponse(boolean noInit) {} - + private SetBalancerRunningResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final SetBalancerRunningResponse defaultInstance; public static SetBalancerRunningResponse getDefaultInstance() { return defaultInstance; } - + public SetBalancerRunningResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private SetBalancerRunningResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + prevBalanceValue_ = input.readBool(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_SetBalancerRunningResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_SetBalancerRunningResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_SetBalancerRunningResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.Builder.class); } - + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public SetBalancerRunningResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new SetBalancerRunningResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return 
PARSER; + } + private int bitField0_; // optional bool prevBalanceValue = 1; public static final int PREVBALANCEVALUE_FIELD_NUMBER = 1; private boolean prevBalanceValue_; + /** + * optional bool prevBalanceValue = 1; + */ public boolean hasPrevBalanceValue() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional bool prevBalanceValue = 1; + */ public boolean getPrevBalanceValue() { return prevBalanceValue_; } - + private void initFields() { prevBalanceValue_ = false; } @@ -12953,11 +15207,11 @@ public final class MasterAdminProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -12966,12 +15220,12 @@ public final class MasterAdminProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -12981,14 +15235,14 @@ public final class MasterAdminProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -12998,7 +15252,7 @@ public final class MasterAdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse) obj; - + boolean result = true; result = result && (hasPrevBalanceValue() == other.hasPrevBalanceValue()); if (hasPrevBalanceValue()) { @@ -13009,9 +15263,13 @@ public final class MasterAdminProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasPrevBalanceValue()) { @@ -13019,89 +15277,79 @@ public final class MasterAdminProtos { hash = (53 * hash) + hashBoolean(getPrevBalanceValue()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return 
newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code SetBalancerRunningResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponseOrBuilder { @@ -13109,18 +15357,21 @@ public final class MasterAdminProtos { getDescriptor() { return 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_SetBalancerRunningResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_SetBalancerRunningResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_SetBalancerRunningResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -13131,27 +15382,27 @@ public final class MasterAdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); prevBalanceValue_ = false; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_SetBalancerRunningResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse result = buildPartial(); if (!result.isInitialized()) { @@ -13159,17 +15410,7 @@ public final class MasterAdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse(this); int from_bitField0_ = bitField0_; @@ -13182,7 +15423,7 @@ public final class MasterAdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse)other); @@ -13191,7 +15432,7 @@ public final class MasterAdminProtos { return 
this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.getDefaultInstance()) return this; if (other.hasPrevBalanceValue()) { @@ -13200,143 +15441,201 @@ public final class MasterAdminProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - prevBalanceValue_ = input.readBool(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // optional bool prevBalanceValue = 1; private boolean prevBalanceValue_ ; + /** + * optional bool prevBalanceValue = 1; + */ public boolean hasPrevBalanceValue() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional bool prevBalanceValue = 1; + */ public boolean getPrevBalanceValue() { return prevBalanceValue_; } + /** + * optional bool prevBalanceValue = 1; + */ public Builder setPrevBalanceValue(boolean value) { bitField0_ |= 0x00000001; prevBalanceValue_ = value; onChanged(); return this; } + /** + * optional bool prevBalanceValue = 1; + */ public Builder clearPrevBalanceValue() { bitField0_ = (bitField0_ & ~0x00000001); prevBalanceValue_ = false; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:SetBalancerRunningResponse) } - + static { defaultInstance = new SetBalancerRunningResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:SetBalancerRunningResponse) } - + public interface CatalogScanRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code CatalogScanRequest} + */ public static final class CatalogScanRequest extends com.google.protobuf.GeneratedMessage implements CatalogScanRequestOrBuilder { // Use CatalogScanRequest.newBuilder() to construct. 
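// Illustrative sketch of the merge contract visible in the regenerated
// Builder.mergeFrom(CodedInputStream, ExtensionRegistryLite) above: it delegates to
// PARSER.parsePartialFrom and, when parsing fails, merges e.getUnfinishedMessage() in
// the finally block before rethrowing, so fields decoded before the error are kept in
// the builder. Caller-side, under that contract (wireBytes is a hypothetical input):
//
//   SetBalancerRunningRequest.Builder builder = SetBalancerRunningRequest.newBuilder();
//   try {
//     builder.mergeFrom(
//         com.google.protobuf.CodedInputStream.newInstance(wireBytes),
//         com.google.protobuf.ExtensionRegistryLite.getEmptyRegistry());
//   } catch (com.google.protobuf.InvalidProtocolBufferException e) {
//     boolean sawOn = builder.hasOn();  // true if the "on" field was read before the failure
//   }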
- private CatalogScanRequest(Builder builder) { + private CatalogScanRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private CatalogScanRequest(boolean noInit) {} - + private CatalogScanRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final CatalogScanRequest defaultInstance; public static CatalogScanRequest getDefaultInstance() { return defaultInstance; } - + public CatalogScanRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private CatalogScanRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CatalogScanRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CatalogScanRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CatalogScanRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public CatalogScanRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CatalogScanRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return 
size; - + size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -13346,101 +15645,95 @@ public final class MasterAdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest) obj; - + boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code CatalogScanRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequestOrBuilder { @@ -13448,18 +15741,21 @@ public final class MasterAdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CatalogScanRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CatalogScanRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CatalogScanRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -13470,49 +15766,39 @@ public final class MasterAdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CatalogScanRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest getDefaultInstanceForType() { return 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest.getDefaultInstance(); } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest build() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest result = buildPartial(); if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); + throw newUninitializedMessageException(result); } return result; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest(this); onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest)other); @@ -13521,102 +15807,171 @@ public final class MasterAdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - - + // @@protoc_insertion_point(builder_scope:CatalogScanRequest) } - + static { defaultInstance = new CatalogScanRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:CatalogScanRequest) } - + public interface CatalogScanResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // optional int32 scanResult = 1; + /** + * optional int32 scanResult = 1; + 
*/ boolean hasScanResult(); + /** + * optional int32 scanResult = 1; + */ int getScanResult(); } + /** + * Protobuf type {@code CatalogScanResponse} + */ public static final class CatalogScanResponse extends com.google.protobuf.GeneratedMessage implements CatalogScanResponseOrBuilder { // Use CatalogScanResponse.newBuilder() to construct. - private CatalogScanResponse(Builder builder) { + private CatalogScanResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private CatalogScanResponse(boolean noInit) {} - + private CatalogScanResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final CatalogScanResponse defaultInstance; public static CatalogScanResponse getDefaultInstance() { return defaultInstance; } - + public CatalogScanResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private CatalogScanResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + scanResult_ = input.readInt32(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CatalogScanResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CatalogScanResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CatalogScanResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public CatalogScanResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CatalogScanResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // optional int32 scanResult = 1; public static 
final int SCANRESULT_FIELD_NUMBER = 1; private int scanResult_; + /** + * optional int32 scanResult = 1; + */ public boolean hasScanResult() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional int32 scanResult = 1; + */ public int getScanResult() { return scanResult_; } - + private void initFields() { scanResult_ = 0; } @@ -13624,11 +15979,11 @@ public final class MasterAdminProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -13637,12 +15992,12 @@ public final class MasterAdminProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -13652,14 +16007,14 @@ public final class MasterAdminProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -13669,7 +16024,7 @@ public final class MasterAdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse) obj; - + boolean result = true; result = result && (hasScanResult() == other.hasScanResult()); if (hasScanResult()) { @@ -13680,9 +16035,13 @@ public final class MasterAdminProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasScanResult()) { @@ -13690,89 +16049,79 @@ public final class MasterAdminProtos { hash = (53 * hash) + getScanResult(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code CatalogScanResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponseOrBuilder { @@ -13780,18 +16129,21 @@ public final class MasterAdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CatalogScanResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CatalogScanResponse_fieldAccessorTable; + return 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CatalogScanResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -13802,27 +16154,27 @@ public final class MasterAdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); scanResult_ = 0; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CatalogScanResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse result = buildPartial(); if (!result.isInitialized()) { @@ -13830,17 +16182,7 @@ public final class MasterAdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse(this); int from_bitField0_ = bitField0_; @@ -13853,7 +16195,7 @@ public final class MasterAdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse)other); @@ -13862,7 +16204,7 @@ public final class MasterAdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.getDefaultInstance()) return this; if (other.hasScanResult()) { @@ -13871,123 +16213,199 @@ public final class MasterAdminProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - scanResult_ = input.readInt32(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // optional int32 scanResult = 1; private int scanResult_ ; + /** + * optional int32 scanResult = 1; + */ public boolean hasScanResult() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional int32 scanResult = 1; + */ public int getScanResult() { return scanResult_; } + /** + * optional int32 scanResult = 1; + */ public Builder setScanResult(int value) { bitField0_ |= 0x00000001; scanResult_ = value; onChanged(); return this; } + /** + * optional int32 scanResult = 1; + */ public Builder clearScanResult() { bitField0_ = (bitField0_ & ~0x00000001); scanResult_ = 0; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:CatalogScanResponse) } - + static { defaultInstance = new CatalogScanResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:CatalogScanResponse) } - + public interface EnableCatalogJanitorRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required bool enable = 1; + /** + * required bool enable = 1; + */ boolean hasEnable(); + /** + * required bool enable = 1; + */ boolean getEnable(); } + /** + * Protobuf type {@code EnableCatalogJanitorRequest} + */ public static final class EnableCatalogJanitorRequest extends com.google.protobuf.GeneratedMessage implements EnableCatalogJanitorRequestOrBuilder { // Use EnableCatalogJanitorRequest.newBuilder() to construct. 
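[Note on the hunks above: the rewrite is mechanical and repeats for every message in this file. The protobuf 2.4 parse idiom newBuilder().mergeFrom(...).buildParsed() is replaced by the 2.5-era static PARSER, and the public static parseFrom overloads now just delegate to it. A minimal caller-side sketch, assuming only the generated classes on the classpath; the ParserCallSite class, the decode helper, and the round-trip in main are hypothetical names for illustration, not part of this patch:]

    import com.google.protobuf.InvalidProtocolBufferException;
    import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse;

    public class ParserCallSite {
      // Hypothetical helper: the same bytes decode either way; only the
      // generated plumbing underneath has changed.
      static CatalogScanResponse decode(byte[] data)
          throws InvalidProtocolBufferException {
        // protobuf 2.4 era (removed by this patch):
        //   return CatalogScanResponse.newBuilder().mergeFrom(data).buildParsed();
        // protobuf 2.5 era: the static parseFrom now delegates to PARSER.
        return CatalogScanResponse.parseFrom(data);
      }

      public static void main(String[] args) throws Exception {
        byte[] wire =
            CatalogScanResponse.newBuilder().setScanResult(3).build().toByteArray();
        System.out.println(decode(wire).getScanResult()); // prints 3
      }
    }

[End of note; the diff continues below.]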
- private EnableCatalogJanitorRequest(Builder builder) { + private EnableCatalogJanitorRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private EnableCatalogJanitorRequest(boolean noInit) {} - + private EnableCatalogJanitorRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final EnableCatalogJanitorRequest defaultInstance; public static EnableCatalogJanitorRequest getDefaultInstance() { return defaultInstance; } - + public EnableCatalogJanitorRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private EnableCatalogJanitorRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + enable_ = input.readBool(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableCatalogJanitorRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableCatalogJanitorRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableCatalogJanitorRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public EnableCatalogJanitorRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new EnableCatalogJanitorRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required bool enable = 1; public static final int ENABLE_FIELD_NUMBER = 1; private boolean enable_; + /** + * required bool enable = 1; + */ public boolean hasEnable() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bool enable = 1; + */ public 
boolean getEnable() { return enable_; } - + private void initFields() { enable_ = false; } @@ -13995,7 +16413,7 @@ public final class MasterAdminProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasEnable()) { memoizedIsInitialized = 0; return false; @@ -14003,7 +16421,7 @@ public final class MasterAdminProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -14012,12 +16430,12 @@ public final class MasterAdminProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -14027,14 +16445,14 @@ public final class MasterAdminProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -14044,7 +16462,7 @@ public final class MasterAdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest) obj; - + boolean result = true; result = result && (hasEnable() == other.hasEnable()); if (hasEnable()) { @@ -14055,9 +16473,13 @@ public final class MasterAdminProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasEnable()) { @@ -14065,89 +16487,79 @@ public final class MasterAdminProtos { hash = (53 * hash) + hashBoolean(getEnable()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, 
extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code EnableCatalogJanitorRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequestOrBuilder { @@ -14155,18 +16567,21 @@ public final class MasterAdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableCatalogJanitorRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableCatalogJanitorRequest_fieldAccessorTable; + return 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableCatalogJanitorRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -14177,27 +16592,27 @@ public final class MasterAdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); enable_ = false; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableCatalogJanitorRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest result = buildPartial(); if (!result.isInitialized()) { @@ -14205,17 +16620,7 @@ public final class MasterAdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest(this); int from_bitField0_ = bitField0_; @@ -14228,7 +16633,7 @@ public final class MasterAdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest)other); @@ -14237,7 +16642,7 @@ public final class MasterAdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest.getDefaultInstance()) return this; if (other.hasEnable()) { @@ -14246,7 +16651,7 @@ public final class MasterAdminProtos 
{ this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasEnable()) { @@ -14254,119 +16659,195 @@ public final class MasterAdminProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - enable_ = input.readBool(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required bool enable = 1; private boolean enable_ ; + /** + * required bool enable = 1; + */ public boolean hasEnable() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bool enable = 1; + */ public boolean getEnable() { return enable_; } + /** + * required bool enable = 1; + */ public Builder setEnable(boolean value) { bitField0_ |= 0x00000001; enable_ = value; onChanged(); return this; } + /** + * required bool enable = 1; + */ public Builder clearEnable() { bitField0_ = (bitField0_ & ~0x00000001); enable_ = false; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:EnableCatalogJanitorRequest) } - + static { defaultInstance = new EnableCatalogJanitorRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:EnableCatalogJanitorRequest) } - + public interface EnableCatalogJanitorResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // optional bool prevValue = 1; + /** + * optional bool prevValue = 1; + */ boolean hasPrevValue(); + /** + * optional bool prevValue = 1; + */ boolean getPrevValue(); } + /** + * Protobuf type {@code EnableCatalogJanitorResponse} + */ public static final class EnableCatalogJanitorResponse extends com.google.protobuf.GeneratedMessage implements EnableCatalogJanitorResponseOrBuilder { // Use EnableCatalogJanitorResponse.newBuilder() to construct. 
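[Note on EnableCatalogJanitorRequest, which closes above: it carries a required field (required bool enable = 1), so its isInitialized()/build() pair is where the validation formerly done by the removed buildParsed() still matters. A small hypothetical usage sketch; the RequiredFieldSketch class name and printed strings are illustrative only:]

    import com.google.protobuf.UninitializedMessageException;
    import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest;

    public class RequiredFieldSketch {
      public static void main(String[] args) {
        // 'enable' set: build() passes the isInitialized() check.
        EnableCatalogJanitorRequest ok =
            EnableCatalogJanitorRequest.newBuilder().setEnable(true).build();
        System.out.println(ok.getEnable()); // true

        try {
          // 'enable' unset: build() throws; buildPartial() would not.
          EnableCatalogJanitorRequest.newBuilder().build();
        } catch (UninitializedMessageException e) {
          System.out.println("rejected: " + e.getMessage());
        }
      }
    }

[End of note; the diff continues below.]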
- private EnableCatalogJanitorResponse(Builder builder) { + private EnableCatalogJanitorResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private EnableCatalogJanitorResponse(boolean noInit) {} - + private EnableCatalogJanitorResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final EnableCatalogJanitorResponse defaultInstance; public static EnableCatalogJanitorResponse getDefaultInstance() { return defaultInstance; } - + public EnableCatalogJanitorResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private EnableCatalogJanitorResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + prevValue_ = input.readBool(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableCatalogJanitorResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableCatalogJanitorResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableCatalogJanitorResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public EnableCatalogJanitorResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new EnableCatalogJanitorResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // optional bool prevValue = 1; public static final int PREVVALUE_FIELD_NUMBER = 1; private boolean prevValue_; + /** + * optional bool prevValue = 1; + */ public boolean hasPrevValue() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * 
optional bool prevValue = 1; + */ public boolean getPrevValue() { return prevValue_; } - + private void initFields() { prevValue_ = false; } @@ -14374,11 +16855,11 @@ public final class MasterAdminProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -14387,12 +16868,12 @@ public final class MasterAdminProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -14402,14 +16883,14 @@ public final class MasterAdminProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -14419,7 +16900,7 @@ public final class MasterAdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse) obj; - + boolean result = true; result = result && (hasPrevValue() == other.hasPrevValue()); if (hasPrevValue()) { @@ -14430,9 +16911,13 @@ public final class MasterAdminProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasPrevValue()) { @@ -14440,89 +16925,79 @@ public final class MasterAdminProtos { hash = (53 * hash) + hashBoolean(getPrevValue()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return 
PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code EnableCatalogJanitorResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponseOrBuilder { @@ -14530,18 +17005,21 @@ public final class MasterAdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableCatalogJanitorResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableCatalogJanitorResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableCatalogJanitorResponse_fieldAccessorTable + 
.ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -14552,27 +17030,27 @@ public final class MasterAdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); prevValue_ = false; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableCatalogJanitorResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse result = buildPartial(); if (!result.isInitialized()) { @@ -14580,17 +17058,7 @@ public final class MasterAdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse(this); int from_bitField0_ = bitField0_; @@ -14603,7 +17071,7 @@ public final class MasterAdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse)other); @@ -14612,7 +17080,7 @@ public final class MasterAdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.getDefaultInstance()) return this; if (other.hasPrevValue()) { @@ -14621,143 +17089,201 @@ public final class MasterAdminProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean 
isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - prevValue_ = input.readBool(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // optional bool prevValue = 1; private boolean prevValue_ ; + /** + * optional bool prevValue = 1; + */ public boolean hasPrevValue() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional bool prevValue = 1; + */ public boolean getPrevValue() { return prevValue_; } + /** + * optional bool prevValue = 1; + */ public Builder setPrevValue(boolean value) { bitField0_ |= 0x00000001; prevValue_ = value; onChanged(); return this; } + /** + * optional bool prevValue = 1; + */ public Builder clearPrevValue() { bitField0_ = (bitField0_ & ~0x00000001); prevValue_ = false; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:EnableCatalogJanitorResponse) } - + static { defaultInstance = new EnableCatalogJanitorResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:EnableCatalogJanitorResponse) } - + public interface IsCatalogJanitorEnabledRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code IsCatalogJanitorEnabledRequest} + */ public static final class IsCatalogJanitorEnabledRequest extends com.google.protobuf.GeneratedMessage implements IsCatalogJanitorEnabledRequestOrBuilder { // Use IsCatalogJanitorEnabledRequest.newBuilder() to construct. 
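[Note on the Builder.mergeFrom(CodedInputStream, ExtensionRegistryLite) rewrite visible in the hunk above and repeated for every message: the hand-rolled tag loop is replaced by a call to PARSER.parsePartialFrom, and on failure the builder merges whatever was parsed before rethrowing. A hypothetical demonstration of that error path under the assumption of protobuf 2.5 semantics (getUnfinishedMessage() is new in 2.5); the class name and the truncated byte array are illustrative only:]

    import com.google.protobuf.InvalidProtocolBufferException;
    import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse;

    public class UnfinishedMessageSketch {
      public static void main(String[] args) {
        // Tag for field 1 (varint) with its value byte deliberately missing.
        byte[] truncated = { 0x08 };
        EnableCatalogJanitorResponse.Builder builder =
            EnableCatalogJanitorResponse.newBuilder();
        try {
          builder.mergeFrom(truncated);
        } catch (InvalidProtocolBufferException e) {
          // The partially-parsed message rides along on the exception; the
          // rewritten mergeFrom merges it into the builder before rethrowing,
          // so fields read before the failure are not lost.
          System.out.println("unfinished message present: "
              + (e.getUnfinishedMessage() != null));
        }
      }
    }

[End of note; the diff continues below.]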
- private IsCatalogJanitorEnabledRequest(Builder builder) { + private IsCatalogJanitorEnabledRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private IsCatalogJanitorEnabledRequest(boolean noInit) {} - + private IsCatalogJanitorEnabledRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final IsCatalogJanitorEnabledRequest defaultInstance; public static IsCatalogJanitorEnabledRequest getDefaultInstance() { return defaultInstance; } - + public IsCatalogJanitorEnabledRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private IsCatalogJanitorEnabledRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsCatalogJanitorEnabledRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsCatalogJanitorEnabledRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsCatalogJanitorEnabledRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest.Builder.class); + } + + public static com.google.protobuf.Parser<IsCatalogJanitorEnabledRequest> PARSER = + new com.google.protobuf.AbstractParser<IsCatalogJanitorEnabledRequest>() { + public IsCatalogJanitorEnabledRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new IsCatalogJanitorEnabledRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser<IsCatalogJanitorEnabledRequest> getParserForType() { + return PARSER; } - + private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
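// The parsing constructor just added consumes the whole stream in a single done-loop,
// routing unrecognized tags into unknownFields, and the static PARSER is a thin
// wrapper around that constructor. A minimal usage sketch (the decode helper is
// illustrative, not part of the generated API):
static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest
    decode(byte[] bytes) throws com.google.protobuf.InvalidProtocolBufferException {
  return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos
      .IsCatalogJanitorEnabledRequest.PARSER.parseFrom(bytes);
}
// getParserForType() exposes the same parser through the generic Message interface,
// which is what framework-level RPC plumbing typically calls:
com.google.protobuf.Parser<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest> p =
    org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos
        .IsCatalogJanitorEnabledRequest.getDefaultInstance().getParserForType();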
getSerializedSize(); getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -14767,101 +17293,95 @@ public final class MasterAdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest) obj; - + boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if 
(builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code IsCatalogJanitorEnabledRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequestOrBuilder { @@ -14869,18 +17389,21 @@ getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsCatalogJanitorEnabledRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsCatalogJanitorEnabledRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsCatalogJanitorEnabledRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -14891,25 +17414,25 @@ private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public
com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsCatalogJanitorEnabledRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest result = buildPartial(); if (!result.isInitialized()) { @@ -14917,23 +17440,13 @@ public final class MasterAdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest(this); onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest)other); @@ -14942,102 +17455,171 @@ public final class MasterAdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest) e.getUnfinishedMessage(); + throw e; + } finally { 
+ if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - - + // @@protoc_insertion_point(builder_scope:IsCatalogJanitorEnabledRequest) } - + static { defaultInstance = new IsCatalogJanitorEnabledRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:IsCatalogJanitorEnabledRequest) } - + public interface IsCatalogJanitorEnabledResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required bool value = 1; + /** + * required bool value = 1; + */ boolean hasValue(); + /** + * required bool value = 1; + */ boolean getValue(); } + /** + * Protobuf type {@code IsCatalogJanitorEnabledResponse} + */ public static final class IsCatalogJanitorEnabledResponse extends com.google.protobuf.GeneratedMessage implements IsCatalogJanitorEnabledResponseOrBuilder { // Use IsCatalogJanitorEnabledResponse.newBuilder() to construct. - private IsCatalogJanitorEnabledResponse(Builder builder) { + private IsCatalogJanitorEnabledResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private IsCatalogJanitorEnabledResponse(boolean noInit) {} - + private IsCatalogJanitorEnabledResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final IsCatalogJanitorEnabledResponse defaultInstance; public static IsCatalogJanitorEnabledResponse getDefaultInstance() { return defaultInstance; } - + public IsCatalogJanitorEnabledResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private IsCatalogJanitorEnabledResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + value_ = input.readBool(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsCatalogJanitorEnabledResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsCatalogJanitorEnabledResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsCatalogJanitorEnabledResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse.Builder.class); + } + + public static com.google.protobuf.Parser<IsCatalogJanitorEnabledResponse> PARSER = + new com.google.protobuf.AbstractParser<IsCatalogJanitorEnabledResponse>() { + public IsCatalogJanitorEnabledResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new IsCatalogJanitorEnabledResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser<IsCatalogJanitorEnabledResponse> getParserForType() { + return PARSER; } - + private int bitField0_; // required bool value = 1; public static final int VALUE_FIELD_NUMBER = 1; private boolean value_; + /** + * required bool value = 1; + */ public boolean hasValue() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bool value = 1; + */ public boolean getValue() { return value_; } - + private void initFields() { value_ = false; } @@ -15045,7 +17627,7 @@ public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasValue()) { memoizedIsInitialized = 0; return false; @@ -15053,7 +17635,7 @@ memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -15062,12 +17644,12 @@ } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -15077,14 +17659,14 @@ memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -15094,7 +17676,7 @@ return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse) obj; - + boolean result = true; result = result && (hasValue() == other.hasValue()); if (hasValue()) { @@ -15105,9 +17687,13 @@ getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasValue()) { @@ -15115,89 +17701,79 @@ hash = (53 * hash) + hashBoolean(getValue()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return
newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse prototype) { return 
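// Every static parseFrom()/parseDelimitedFrom() overload above is now a one-line
// delegation to PARSER. The delimited variants appear to keep their old contract:
// protobuf 2.5's AbstractParser.parseDelimitedFrom() returns null on clean
// end-of-stream, matching the old branch where mergeDelimitedFrom() returned false.
// Sketch of the usual read loop; the InputStream variable `in` is assumed:
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse msg;
while ((msg = org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos
        .IsCatalogJanitorEnabledResponse.parseDelimitedFrom(in)) != null) {
  boolean enabled = msg.getValue();  // required bool value = 1
}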
newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code IsCatalogJanitorEnabledResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponseOrBuilder { @@ -15205,18 +17781,21 @@ getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsCatalogJanitorEnabledResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsCatalogJanitorEnabledResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsCatalogJanitorEnabledResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -15227,27 +17806,27 @@ private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); value_ = false; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsCatalogJanitorEnabledResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse result = buildPartial(); if (!result.isInitialized()) { @@ -15255,17 +17834,7 @@ } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse
buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse(this); int from_bitField0_ = bitField0_; @@ -15278,7 +17847,7 @@ public final class MasterAdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse)other); @@ -15287,7 +17856,7 @@ public final class MasterAdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance()) return this; if (other.hasValue()) { @@ -15296,7 +17865,7 @@ public final class MasterAdminProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasValue()) { @@ -15304,123 +17873,213 @@ public final class MasterAdminProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - value_ = input.readBool(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required bool value = 1; private boolean value_ ; + /** + * required bool value = 1; + */ public boolean hasValue() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bool value = 1; + */ public boolean getValue() { return value_; } + /** + * required bool value = 1; + */ public Builder setValue(boolean value) { bitField0_ |= 0x00000001; value_ = value; onChanged(); return this; } + /** + * required bool value = 1; + */ public Builder clearValue() { bitField0_ = (bitField0_ & ~0x00000001); value_ = false; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:IsCatalogJanitorEnabledResponse) } - + static { defaultInstance = new IsCatalogJanitorEnabledResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:IsCatalogJanitorEnabledResponse) } - + public interface TakeSnapshotRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .SnapshotDescription snapshot = 1; + /** + * required 
.SnapshotDescription snapshot = 1; + */ boolean hasSnapshot(); + /** + * required .SnapshotDescription snapshot = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot(); + /** + * required .SnapshotDescription snapshot = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder(); } + /** + * Protobuf type {@code TakeSnapshotRequest} + */ public static final class TakeSnapshotRequest extends com.google.protobuf.GeneratedMessage implements TakeSnapshotRequestOrBuilder { // Use TakeSnapshotRequest.newBuilder() to construct. - private TakeSnapshotRequest(Builder builder) { + private TakeSnapshotRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private TakeSnapshotRequest(boolean noInit) {} - + private TakeSnapshotRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final TakeSnapshotRequest defaultInstance; public static TakeSnapshotRequest getDefaultInstance() { return defaultInstance; } - + public TakeSnapshotRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private TakeSnapshotRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = snapshot_.toBuilder(); + } + snapshot_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(snapshot_); + snapshot_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_TakeSnapshotRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_TakeSnapshotRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_TakeSnapshotRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest.Builder.class); + } + + public static com.google.protobuf.Parser<TakeSnapshotRequest> PARSER = + new com.google.protobuf.AbstractParser<TakeSnapshotRequest>() { + public TakeSnapshotRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new TakeSnapshotRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser<TakeSnapshotRequest> getParserForType() { + return PARSER; } - + private int bitField0_; // required .SnapshotDescription snapshot = 1; public static final int SNAPSHOT_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_; + /** + * required .SnapshotDescription snapshot = 1; + */ public boolean hasSnapshot() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .SnapshotDescription snapshot = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { return snapshot_; } + /** + * required .SnapshotDescription snapshot = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { return snapshot_; } - + private void initFields() { snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); } @@ -15428,7 +18087,7 @@ public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasSnapshot()) { memoizedIsInitialized = 0; return false; @@ -15440,7 +18099,7 @@ memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -15449,12 +18108,12 @@ } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -15464,14 +18123,14 @@ memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -15481,7 +18140,7 @@ return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest) obj; - + boolean result = true; result = result && (hasSnapshot() == other.hasSnapshot()); if (hasSnapshot()) { @@ -15492,9 +18151,13 @@ getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasSnapshot()) { @@ -15502,89
+18165,79 @@ public final class MasterAdminProtos { hash = (53 * hash) + getSnapshot().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, 
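// In TakeSnapshotRequest's parsing constructor (case 10 above), a second occurrence
// of the `snapshot` field on the wire is merged into the first via
// toBuilder()/mergeFrom()/buildPartial(), matching the old Builder.mergeFrom()
// semantics. Caller-side construction is unchanged; a minimal sketch, assuming
// SnapshotDescription's required field is its name (as declared in HBaseProtos):
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest req =
    org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest.newBuilder()
        .setSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos
            .SnapshotDescription.newBuilder().setName("mySnapshot"))
        .build();  // build() throws UninitializedMessageException if snapshot is unset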
extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code TakeSnapshotRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequestOrBuilder { @@ -15592,18 +18245,21 @@ getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_TakeSnapshotRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_TakeSnapshotRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_TakeSnapshotRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -15615,7 +18271,7 @@ private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (snapshotBuilder_ == null) { @@ -15626,20 +18282,20 @@ bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_TakeSnapshotRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest result = buildPartial(); if (!result.isInitialized()) { @@ -15647,17 +18303,7 @@ } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( -
result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest(this); int from_bitField0_ = bitField0_; @@ -15674,7 +18320,7 @@ public final class MasterAdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest)other); @@ -15683,7 +18329,7 @@ public final class MasterAdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest.getDefaultInstance()) return this; if (other.hasSnapshot()) { @@ -15692,7 +18338,7 @@ public final class MasterAdminProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasSnapshot()) { @@ -15704,52 +18350,39 @@ public final class MasterAdminProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.newBuilder(); - if (hasSnapshot()) { - subBuilder.mergeFrom(getSnapshot()); - } - input.readMessage(subBuilder, extensionRegistry); - setSnapshot(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .SnapshotDescription snapshot = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_; + /** + * required .SnapshotDescription snapshot = 1; + */ public boolean hasSnapshot() 
{ return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .SnapshotDescription snapshot = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { if (snapshotBuilder_ == null) { return snapshot_; @@ -15757,6 +18390,9 @@ public final class MasterAdminProtos { return snapshotBuilder_.getMessage(); } } + /** + * required .SnapshotDescription snapshot = 1; + */ public Builder setSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) { if (snapshotBuilder_ == null) { if (value == null) { @@ -15770,6 +18406,9 @@ public final class MasterAdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .SnapshotDescription snapshot = 1; + */ public Builder setSnapshot( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue) { if (snapshotBuilder_ == null) { @@ -15781,6 +18420,9 @@ public final class MasterAdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .SnapshotDescription snapshot = 1; + */ public Builder mergeSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) { if (snapshotBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -15797,6 +18439,9 @@ public final class MasterAdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .SnapshotDescription snapshot = 1; + */ public Builder clearSnapshot() { if (snapshotBuilder_ == null) { snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); @@ -15807,11 +18452,17 @@ public final class MasterAdminProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * required .SnapshotDescription snapshot = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder getSnapshotBuilder() { bitField0_ |= 0x00000001; onChanged(); return getSnapshotFieldBuilder().getBuilder(); } + /** + * required .SnapshotDescription snapshot = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { if (snapshotBuilder_ != null) { return snapshotBuilder_.getMessageOrBuilder(); @@ -15819,6 +18470,9 @@ public final class MasterAdminProtos { return snapshot_; } } + /** + * required .SnapshotDescription snapshot = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> getSnapshotFieldBuilder() { @@ -15832,64 +18486,143 @@ public final class MasterAdminProtos { } return snapshotBuilder_; } - + // @@protoc_insertion_point(builder_scope:TakeSnapshotRequest) } - + static { defaultInstance = new TakeSnapshotRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:TakeSnapshotRequest) } - + public interface TakeSnapshotResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required int64 expectedTimeout = 1; + /** + * required int64 expectedTimeout = 1; + */ boolean hasExpectedTimeout(); + /** + * required int64 expectedTimeout = 1; + */ long getExpectedTimeout(); } + /** + * Protobuf type {@code TakeSnapshotResponse} + */ public static final class TakeSnapshotResponse extends com.google.protobuf.GeneratedMessage implements TakeSnapshotResponseOrBuilder { // Use 
TakeSnapshotResponse.newBuilder() to construct. - private TakeSnapshotResponse(Builder builder) { + private TakeSnapshotResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private TakeSnapshotResponse(boolean noInit) {} - + private TakeSnapshotResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final TakeSnapshotResponse defaultInstance; public static TakeSnapshotResponse getDefaultInstance() { return defaultInstance; } - + public TakeSnapshotResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private TakeSnapshotResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + expectedTimeout_ = input.readInt64(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_TakeSnapshotResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_TakeSnapshotResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_TakeSnapshotResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse.Builder.class); + } + + public static com.google.protobuf.Parser<TakeSnapshotResponse> PARSER = + new com.google.protobuf.AbstractParser<TakeSnapshotResponse>() { + public TakeSnapshotResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new TakeSnapshotResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser<TakeSnapshotResponse> getParserForType() { + return PARSER; } - + private int bitField0_; // required int64 expectedTimeout = 1; public static final int EXPECTEDTIMEOUT_FIELD_NUMBER = 1; private long expectedTimeout_; + /** + * required int64 expectedTimeout = 1; + */ public boolean hasExpectedTimeout() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required int64 expectedTimeout = 1; + */
public long getExpectedTimeout() { return expectedTimeout_; } - + private void initFields() { expectedTimeout_ = 0L; } @@ -15897,7 +18630,7 @@ public final class MasterAdminProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasExpectedTimeout()) { memoizedIsInitialized = 0; return false; @@ -15905,7 +18638,7 @@ public final class MasterAdminProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -15914,12 +18647,12 @@ public final class MasterAdminProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -15929,14 +18662,14 @@ public final class MasterAdminProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -15946,7 +18679,7 @@ public final class MasterAdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse) obj; - + boolean result = true; result = result && (hasExpectedTimeout() == other.hasExpectedTimeout()); if (hasExpectedTimeout()) { @@ -15957,9 +18690,13 @@ public final class MasterAdminProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasExpectedTimeout()) { @@ -15967,89 +18704,79 @@ public final class MasterAdminProtos { hash = (53 * hash) + hashLong(getExpectedTimeout()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - 
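// hashCode() on these immutable messages is now memoized: the first call computes the
// 41/19/53/29 mix shown above and caches it in memoizedHashCode; later calls return the
// cached value. Zero doubles as the "not yet computed" sentinel, so a message whose
// hash is genuinely 0 is simply recomputed each time, and the unsynchronized cache is a
// benign data race: int writes are atomic and every thread computes the same value.
// The idiom reduced to its shape (field and helper names here are illustrative):
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;  // cached by an earlier call; the message is immutable
  }
  int hash = computeHashCode();  // hypothetical helper standing in for the mixing above
  memoizedHashCode = hash;
  return hash;
}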
return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code TakeSnapshotResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponseOrBuilder { @@ -16057,18 +18784,21 @@ public final class MasterAdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_TakeSnapshotResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_TakeSnapshotResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_TakeSnapshotResponse_fieldAccessorTable + 
.ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -16079,27 +18809,27 @@ public final class MasterAdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); expectedTimeout_ = 0L; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_TakeSnapshotResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse result = buildPartial(); if (!result.isInitialized()) { @@ -16107,17 +18837,7 @@ public final class MasterAdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse(this); int from_bitField0_ = bitField0_; @@ -16130,7 +18850,7 @@ public final class MasterAdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse)other); @@ -16139,7 +18859,7 @@ public final class MasterAdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse.getDefaultInstance()) return this; if (other.hasExpectedTimeout()) { @@ -16148,7 +18868,7 @@ public final class MasterAdminProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasExpectedTimeout()) { @@ -16156,139 +18876,197 @@ public final class MasterAdminProtos { } return true; } - + public 
Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - expectedTimeout_ = input.readInt64(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required int64 expectedTimeout = 1; private long expectedTimeout_ ; + /** + * required int64 expectedTimeout = 1; + */ public boolean hasExpectedTimeout() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required int64 expectedTimeout = 1; + */ public long getExpectedTimeout() { return expectedTimeout_; } + /** + * required int64 expectedTimeout = 1; + */ public Builder setExpectedTimeout(long value) { bitField0_ |= 0x00000001; expectedTimeout_ = value; onChanged(); return this; } + /** + * required int64 expectedTimeout = 1; + */ public Builder clearExpectedTimeout() { bitField0_ = (bitField0_ & ~0x00000001); expectedTimeout_ = 0L; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:TakeSnapshotResponse) } - + static { defaultInstance = new TakeSnapshotResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:TakeSnapshotResponse) } - + public interface ListSnapshotRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code ListSnapshotRequest} + */ public static final class ListSnapshotRequest extends com.google.protobuf.GeneratedMessage implements ListSnapshotRequestOrBuilder { // Use ListSnapshotRequest.newBuilder() to construct. 
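The TakeSnapshotResponse hunks above show the rewrite this patch repeats for every message in the file: the static parseFrom/parseDelimitedFrom overloads collapse into one-line delegations to the new PARSER field, and Builder.mergeFrom(CodedInputStream) drops its hand-rolled tag loop in favor of PARSER.parsePartialFrom, with the finally block merging whatever was parsed before a failure so builder semantics are preserved. A minimal caller-side sketch of the post-patch parse path follows; the helper class and its method are illustrative, not part of the patch:

```java
import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse;

// Illustrative helper, not from the patch.
public class TakeSnapshotResponseSketch {
  static long readExpectedTimeout(byte[] bytes) throws InvalidProtocolBufferException {
    // Replaces the pre-patch newBuilder().mergeFrom(bytes).buildParsed() chain.
    // PARSER.parseFrom still rejects a message missing the required
    // expectedTimeout field, so the getter below is safe to call.
    TakeSnapshotResponse resp = TakeSnapshotResponse.PARSER.parseFrom(bytes);
    return resp.getExpectedTimeout();
  }
}
```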
- private ListSnapshotRequest(Builder builder) { + private ListSnapshotRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private ListSnapshotRequest(boolean noInit) {} - + private ListSnapshotRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final ListSnapshotRequest defaultInstance; public static ListSnapshotRequest getDefaultInstance() { return defaultInstance; } - + public ListSnapshotRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ListSnapshotRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ListSnapshotRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ListSnapshotRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ListSnapshotRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public ListSnapshotRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ListSnapshotRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if 
(size != -1) return size; - + size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -16298,101 +19076,95 @@ public final class MasterAdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest) obj; - + boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest parseDelimitedFrom( java.io.InputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code ListSnapshotRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequestOrBuilder { @@ -16400,18 +19172,21 @@ public final class MasterAdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ListSnapshotRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ListSnapshotRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ListSnapshotRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -16422,25 +19197,25 @@ public final class MasterAdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ListSnapshotRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest getDefaultInstanceForType() { 
return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest result = buildPartial(); if (!result.isInitialized()) { @@ -16448,23 +19223,13 @@ public final class MasterAdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest(this); onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest)other); @@ -16473,118 +19238,211 @@ public final class MasterAdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - - + // @@protoc_insertion_point(builder_scope:ListSnapshotRequest) } - + static { defaultInstance = new ListSnapshotRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:ListSnapshotRequest) } - + public interface ListSnapshotResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // repeated .SnapshotDescription snapshots = 1; + /** + * repeated .SnapshotDescription snapshots = 1; + */ java.util.List getSnapshotsList(); + /** + * repeated .SnapshotDescription 
snapshots = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshots(int index); + /** + * repeated .SnapshotDescription snapshots = 1; + */ int getSnapshotsCount(); + /** + * repeated .SnapshotDescription snapshots = 1; + */ java.util.List getSnapshotsOrBuilderList(); + /** + * repeated .SnapshotDescription snapshots = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotsOrBuilder( int index); } + /** + * Protobuf type {@code ListSnapshotResponse} + */ public static final class ListSnapshotResponse extends com.google.protobuf.GeneratedMessage implements ListSnapshotResponseOrBuilder { // Use ListSnapshotResponse.newBuilder() to construct. - private ListSnapshotResponse(Builder builder) { + private ListSnapshotResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private ListSnapshotResponse(boolean noInit) {} - + private ListSnapshotResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final ListSnapshotResponse defaultInstance; public static ListSnapshotResponse getDefaultInstance() { return defaultInstance; } - + public ListSnapshotResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ListSnapshotResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + snapshots_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + snapshots_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.PARSER, extensionRegistry)); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + snapshots_ = java.util.Collections.unmodifiableList(snapshots_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ListSnapshotResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ListSnapshotResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ListSnapshotResponse_fieldAccessorTable + 
.ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public ListSnapshotResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ListSnapshotResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + // repeated .SnapshotDescription snapshots = 1; public static final int SNAPSHOTS_FIELD_NUMBER = 1; private java.util.List snapshots_; + /** + * repeated .SnapshotDescription snapshots = 1; + */ public java.util.List getSnapshotsList() { return snapshots_; } + /** + * repeated .SnapshotDescription snapshots = 1; + */ public java.util.List getSnapshotsOrBuilderList() { return snapshots_; } + /** + * repeated .SnapshotDescription snapshots = 1; + */ public int getSnapshotsCount() { return snapshots_.size(); } + /** + * repeated .SnapshotDescription snapshots = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshots(int index) { return snapshots_.get(index); } + /** + * repeated .SnapshotDescription snapshots = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotsOrBuilder( int index) { return snapshots_.get(index); } - + private void initFields() { snapshots_ = java.util.Collections.emptyList(); } @@ -16592,7 +19450,7 @@ public final class MasterAdminProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + for (int i = 0; i < getSnapshotsCount(); i++) { if (!getSnapshots(i).isInitialized()) { memoizedIsInitialized = 0; @@ -16602,7 +19460,7 @@ public final class MasterAdminProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -16611,12 +19469,12 @@ public final class MasterAdminProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; for (int i = 0; i < snapshots_.size(); i++) { size += com.google.protobuf.CodedOutputStream @@ -16626,14 +19484,14 @@ public final class MasterAdminProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -16643,7 +19501,7 @@ public final class MasterAdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse) obj; - + boolean result = true; result = result && getSnapshotsList() .equals(other.getSnapshotsList()); @@ -16651,9 +19509,13 @@ public final class MasterAdminProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int 
memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (getSnapshotsCount() > 0) { @@ -16661,89 +19523,79 @@ public final class MasterAdminProtos { hash = (53 * hash) + getSnapshotsList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse 
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code ListSnapshotResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponseOrBuilder { @@ -16751,18 +19603,21 @@ public final class MasterAdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ListSnapshotResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ListSnapshotResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ListSnapshotResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -16774,7 +19629,7 @@ public final class MasterAdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (snapshotsBuilder_ == null) { @@ -16785,20 +19640,20 @@ public final class MasterAdminProtos { } return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ListSnapshotResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse result = buildPartial(); if (!result.isInitialized()) { @@ -16806,17 +19661,7 @@ public final class MasterAdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse buildParsed() - throws 
com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse(this); int from_bitField0_ = bitField0_; @@ -16832,7 +19677,7 @@ public final class MasterAdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse)other); @@ -16841,7 +19686,7 @@ public final class MasterAdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse.getDefaultInstance()) return this; if (snapshotsBuilder_ == null) { @@ -16873,7 +19718,7 @@ public final class MasterAdminProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { for (int i = 0; i < getSnapshotsCount(); i++) { if (!getSnapshots(i).isInitialized()) { @@ -16883,42 +19728,26 @@ public final class MasterAdminProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addSnapshots(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // repeated .SnapshotDescription snapshots = 1; private java.util.List snapshots_ = java.util.Collections.emptyList(); @@ -16928,10 +19757,13 @@ public final class MasterAdminProtos { bitField0_ |= 0x00000001; } } - + private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> snapshotsBuilder_; - + + /** + * repeated .SnapshotDescription snapshots = 1; + */ public java.util.List getSnapshotsList() { if (snapshotsBuilder_ == null) { return java.util.Collections.unmodifiableList(snapshots_); @@ -16939,6 +19771,9 @@ public final class MasterAdminProtos { return snapshotsBuilder_.getMessageList(); } } + /** + * repeated .SnapshotDescription snapshots = 1; + */ public int getSnapshotsCount() { if (snapshotsBuilder_ == null) { return snapshots_.size(); @@ -16946,6 +19781,9 @@ public final class MasterAdminProtos { return snapshotsBuilder_.getCount(); } } + /** + * repeated .SnapshotDescription snapshots = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshots(int index) { if (snapshotsBuilder_ == null) { return snapshots_.get(index); @@ -16953,6 +19791,9 @@ public final class MasterAdminProtos { return snapshotsBuilder_.getMessage(index); } } + /** + * repeated .SnapshotDescription snapshots = 1; + */ public Builder setSnapshots( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) { if (snapshotsBuilder_ == null) { @@ -16967,6 +19808,9 @@ public final class MasterAdminProtos { } return this; } + /** + * repeated .SnapshotDescription snapshots = 1; + */ public Builder setSnapshots( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue) { if (snapshotsBuilder_ == null) { @@ -16978,6 +19822,9 @@ public final class MasterAdminProtos { } return this; } + /** + * repeated .SnapshotDescription snapshots = 1; + */ public Builder addSnapshots(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) { if (snapshotsBuilder_ == null) { if (value == null) { @@ -16991,6 +19838,9 @@ public final class MasterAdminProtos { } return this; } + /** + * repeated .SnapshotDescription snapshots = 1; + */ public Builder addSnapshots( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) { if (snapshotsBuilder_ == null) { @@ -17005,6 +19855,9 @@ public final class MasterAdminProtos { } return this; } + /** + * repeated .SnapshotDescription snapshots = 1; + */ public Builder addSnapshots( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue) { if (snapshotsBuilder_ == null) { @@ -17016,6 +19869,9 @@ public final class MasterAdminProtos { } return this; } + /** + * repeated .SnapshotDescription snapshots = 1; + */ public Builder addSnapshots( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue) { if (snapshotsBuilder_ == null) { @@ -17027,6 +19883,9 @@ public final class MasterAdminProtos { } return this; } + /** + * repeated .SnapshotDescription snapshots = 1; + */ public Builder addAllSnapshots( java.lang.Iterable values) { if (snapshotsBuilder_ == null) { @@ -17038,6 +19897,9 @@ public final class MasterAdminProtos { } return this; } + /** + * repeated .SnapshotDescription snapshots = 1; + */ public Builder clearSnapshots() { if (snapshotsBuilder_ == null) { snapshots_ = java.util.Collections.emptyList(); @@ -17048,6 +19910,9 @@ public final class MasterAdminProtos { } return this; } + /** + * repeated .SnapshotDescription snapshots = 1; + */ public Builder removeSnapshots(int index) { if (snapshotsBuilder_ == null) { 
ensureSnapshotsIsMutable(); @@ -17058,10 +19923,16 @@ public final class MasterAdminProtos { } return this; } + /** + * repeated .SnapshotDescription snapshots = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder getSnapshotsBuilder( int index) { return getSnapshotsFieldBuilder().getBuilder(index); } + /** + * repeated .SnapshotDescription snapshots = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotsOrBuilder( int index) { if (snapshotsBuilder_ == null) { @@ -17069,6 +19940,9 @@ public final class MasterAdminProtos { return snapshotsBuilder_.getMessageOrBuilder(index); } } + /** + * repeated .SnapshotDescription snapshots = 1; + */ public java.util.List getSnapshotsOrBuilderList() { if (snapshotsBuilder_ != null) { @@ -17077,15 +19951,24 @@ public final class MasterAdminProtos { return java.util.Collections.unmodifiableList(snapshots_); } } + /** + * repeated .SnapshotDescription snapshots = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder addSnapshotsBuilder() { return getSnapshotsFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance()); } + /** + * repeated .SnapshotDescription snapshots = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder addSnapshotsBuilder( int index) { return getSnapshotsFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance()); } + /** + * repeated .SnapshotDescription snapshots = 1; + */ public java.util.List getSnapshotsBuilderList() { return getSnapshotsFieldBuilder().getBuilderList(); @@ -17104,68 +19987,161 @@ public final class MasterAdminProtos { } return snapshotsBuilder_; } - + // @@protoc_insertion_point(builder_scope:ListSnapshotResponse) } - + static { defaultInstance = new ListSnapshotResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:ListSnapshotResponse) } - + public interface DeleteSnapshotRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .SnapshotDescription snapshot = 1; + /** + * required .SnapshotDescription snapshot = 1; + */ boolean hasSnapshot(); + /** + * required .SnapshotDescription snapshot = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot(); + /** + * required .SnapshotDescription snapshot = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder(); } + /** + * Protobuf type {@code DeleteSnapshotRequest} + */ public static final class DeleteSnapshotRequest extends com.google.protobuf.GeneratedMessage implements DeleteSnapshotRequestOrBuilder { // Use DeleteSnapshotRequest.newBuilder() to construct. 
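In the ListSnapshotResponse hunks above, the new stream constructor also covers the repeated field: tag 10 accumulates SnapshotDescription messages into an ArrayList guarded by mutable_bitField0_, and the finally block seals the list with Collections.unmodifiableList before the unknown fields are built. A sketch of reading a size-delimited response off a stream under the new API; the helper names are ours, not from the patch:

```java
import java.io.IOException;
import java.io.InputStream;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse;

// Illustrative helper, not from the patch.
public class ListSnapshotResponseSketch {
  static int countSnapshots(InputStream in) throws IOException {
    // Post-patch, parseDelimitedFrom is a one-line delegation to
    // PARSER.parseDelimitedFrom; like the old mergeDelimitedFrom path,
    // it yields null at a clean end of stream.
    ListSnapshotResponse resp = ListSnapshotResponse.parseDelimitedFrom(in);
    if (resp == null) {
      return 0;
    }
    for (SnapshotDescription snapshot : resp.getSnapshotsList()) {
      System.out.println(snapshot.getName()); // name is required in SnapshotDescription
    }
    return resp.getSnapshotsCount();
  }
}
```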
- private DeleteSnapshotRequest(Builder builder) { + private DeleteSnapshotRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private DeleteSnapshotRequest(boolean noInit) {} - + private DeleteSnapshotRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final DeleteSnapshotRequest defaultInstance; public static DeleteSnapshotRequest getDefaultInstance() { return defaultInstance; } - + public DeleteSnapshotRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private DeleteSnapshotRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = snapshot_.toBuilder(); + } + snapshot_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(snapshot_); + snapshot_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteSnapshotRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteSnapshotRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteSnapshotRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public DeleteSnapshotRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new DeleteSnapshotRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { 
+ return PARSER; } - + private int bitField0_; // required .SnapshotDescription snapshot = 1; public static final int SNAPSHOT_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_; + /** + * required .SnapshotDescription snapshot = 1; + */ public boolean hasSnapshot() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .SnapshotDescription snapshot = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { return snapshot_; } + /** + * required .SnapshotDescription snapshot = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { return snapshot_; } - + private void initFields() { snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); } @@ -17173,7 +20149,7 @@ public final class MasterAdminProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasSnapshot()) { memoizedIsInitialized = 0; return false; @@ -17185,7 +20161,7 @@ public final class MasterAdminProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -17194,12 +20170,12 @@ public final class MasterAdminProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -17209,14 +20185,14 @@ public final class MasterAdminProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -17226,7 +20202,7 @@ public final class MasterAdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotRequest) obj; - + boolean result = true; result = result && (hasSnapshot() == other.hasSnapshot()); if (hasSnapshot()) { @@ -17237,9 +20213,13 @@ public final class MasterAdminProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasSnapshot()) { @@ -17247,89 +20227,79 @@ public final class MasterAdminProtos { hash = (53 * hash) + getSnapshot().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotRequest parseFrom( com.google.protobuf.ByteString data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code DeleteSnapshotRequest} + */ 
public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotRequestOrBuilder { @@ -17337,18 +20307,21 @@ public final class MasterAdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteSnapshotRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteSnapshotRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteSnapshotRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -17360,7 +20333,7 @@ public final class MasterAdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (snapshotBuilder_ == null) { @@ -17371,20 +20344,20 @@ public final class MasterAdminProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteSnapshotRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotRequest result = buildPartial(); if (!result.isInitialized()) { @@ -17392,17 +20365,7 @@ public final class MasterAdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotRequest(this); int from_bitField0_ = bitField0_; @@ -17419,7 +20382,7 @@ public final class MasterAdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotRequest)other); @@ -17428,7 +20391,7 @@ public final class MasterAdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotRequest.getDefaultInstance()) return this; if (other.hasSnapshot()) { @@ -17437,7 +20400,7 @@ public final class MasterAdminProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasSnapshot()) { @@ -17449,52 +20412,39 @@ public final class MasterAdminProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.newBuilder(); - if (hasSnapshot()) { - subBuilder.mergeFrom(getSnapshot()); - } - input.readMessage(subBuilder, extensionRegistry); - setSnapshot(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .SnapshotDescription snapshot = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_; + /** + * required .SnapshotDescription snapshot = 1; + */ public boolean hasSnapshot() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .SnapshotDescription snapshot = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { if (snapshotBuilder_ == null) { return snapshot_; @@ -17502,6 +20452,9 @@ public final class MasterAdminProtos { return snapshotBuilder_.getMessage(); } } + /** + * required .SnapshotDescription snapshot = 1; + */ public Builder setSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) { if (snapshotBuilder_ == 
null) { if (value == null) { @@ -17515,6 +20468,9 @@ public final class MasterAdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .SnapshotDescription snapshot = 1; + */ public Builder setSnapshot( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue) { if (snapshotBuilder_ == null) { @@ -17526,6 +20482,9 @@ public final class MasterAdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .SnapshotDescription snapshot = 1; + */ public Builder mergeSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) { if (snapshotBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -17542,6 +20501,9 @@ public final class MasterAdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .SnapshotDescription snapshot = 1; + */ public Builder clearSnapshot() { if (snapshotBuilder_ == null) { snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); @@ -17552,11 +20514,17 @@ public final class MasterAdminProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * required .SnapshotDescription snapshot = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder getSnapshotBuilder() { bitField0_ |= 0x00000001; onChanged(); return getSnapshotFieldBuilder().getBuilder(); } + /** + * required .SnapshotDescription snapshot = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { if (snapshotBuilder_ != null) { return snapshotBuilder_.getMessageOrBuilder(); @@ -17564,6 +20532,9 @@ public final class MasterAdminProtos { return snapshot_; } } + /** + * required .SnapshotDescription snapshot = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> getSnapshotFieldBuilder() { @@ -17577,84 +20548,145 @@ public final class MasterAdminProtos { } return snapshotBuilder_; } - + // @@protoc_insertion_point(builder_scope:DeleteSnapshotRequest) } - + static { defaultInstance = new DeleteSnapshotRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:DeleteSnapshotRequest) } - + public interface DeleteSnapshotResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code DeleteSnapshotResponse} + */ public static final class DeleteSnapshotResponse extends com.google.protobuf.GeneratedMessage implements DeleteSnapshotResponseOrBuilder { // Use DeleteSnapshotResponse.newBuilder() to construct. 
- private DeleteSnapshotResponse(Builder builder) { + private DeleteSnapshotResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private DeleteSnapshotResponse(boolean noInit) {} - + private DeleteSnapshotResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final DeleteSnapshotResponse defaultInstance; public static DeleteSnapshotResponse getDefaultInstance() { return defaultInstance; } - + public DeleteSnapshotResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private DeleteSnapshotResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteSnapshotResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteSnapshotResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteSnapshotResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public DeleteSnapshotResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new DeleteSnapshotResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() 
{ int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -17664,101 +20696,95 @@ public final class MasterAdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotResponse) obj; - + boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code DeleteSnapshotResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotResponseOrBuilder { @@ -17766,18 +20792,21 @@ public final class MasterAdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteSnapshotResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteSnapshotResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteSnapshotResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -17788,25 +20817,25 @@ public final class MasterAdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotResponse.getDescriptor(); + return 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteSnapshotResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotResponse result = buildPartial(); if (!result.isInitialized()) { @@ -17814,23 +20843,13 @@ public final class MasterAdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotResponse(this); onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotResponse)other); @@ -17839,106 +20858,189 @@ public final class MasterAdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - - + // @@protoc_insertion_point(builder_scope:DeleteSnapshotResponse) } - + static { defaultInstance = new DeleteSnapshotResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:DeleteSnapshotResponse) } - 
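(Commentary, not part of the generated diff.) DeleteSnapshotResponse is now complete, and it shows the full protobuf 2.5 parsing model in miniature: the message decodes itself in a private (CodedInputStream, ExtensionRegistryLite) constructor, retains the UnknownFieldSet it accumulated on the instance, and Builder.mergeFrom(CodedInputStream) shrinks to PARSER.parsePartialFrom() plus a finally block that re-merges whatever was decoded before a failure. The partial result rides on the exception via getUnfinishedMessage(); a hedged sketch of how a caller could exploit that (lenientParse is an illustrative name, not an HBase API):

import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotRequest;

public class UnfinishedMessageSketch {
  // Returns whatever fields survived a failed parse instead of propagating.
  // Use with care: the result may be partially filled and can fail
  // isInitialized(), e.g. when the required snapshot field is absent.
  static DeleteSnapshotRequest lenientParse(byte[] data) {
    try {
      return DeleteSnapshotRequest.PARSER.parseFrom(data);
    } catch (InvalidProtocolBufferException e) {
      // protobuf 2.5 attaches the partially decoded message to the exception;
      // the generated Builder.mergeFrom() re-merges this same object.
      return (DeleteSnapshotRequest) e.getUnfinishedMessage();
    }
  }
}

Two smaller recurring changes ride along in the same hunks: hashCode() is memoized in memoizedHashCode (safe because messages are immutable; 0 serves as the not-yet-computed sentinel), and internalGetFieldAccessorTable() now calls ensureFieldAccessorsInitialized() so the reflection tables are built lazily per message class.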
+ public interface RestoreSnapshotRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .SnapshotDescription snapshot = 1; + /** + * required .SnapshotDescription snapshot = 1; + */ boolean hasSnapshot(); + /** + * required .SnapshotDescription snapshot = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot(); + /** + * required .SnapshotDescription snapshot = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder(); } + /** + * Protobuf type {@code RestoreSnapshotRequest} + */ public static final class RestoreSnapshotRequest extends com.google.protobuf.GeneratedMessage implements RestoreSnapshotRequestOrBuilder { // Use RestoreSnapshotRequest.newBuilder() to construct. - private RestoreSnapshotRequest(Builder builder) { + private RestoreSnapshotRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private RestoreSnapshotRequest(boolean noInit) {} - + private RestoreSnapshotRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final RestoreSnapshotRequest defaultInstance; public static RestoreSnapshotRequest getDefaultInstance() { return defaultInstance; } - + public RestoreSnapshotRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private RestoreSnapshotRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = snapshot_.toBuilder(); + } + snapshot_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(snapshot_); + snapshot_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_RestoreSnapshotRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_RestoreSnapshotRequest_fieldAccessorTable; + return 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_RestoreSnapshotRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public RestoreSnapshotRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RestoreSnapshotRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required .SnapshotDescription snapshot = 1; public static final int SNAPSHOT_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_; + /** + * required .SnapshotDescription snapshot = 1; + */ public boolean hasSnapshot() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .SnapshotDescription snapshot = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { return snapshot_; } + /** + * required .SnapshotDescription snapshot = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { return snapshot_; } - + private void initFields() { snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); } @@ -17946,7 +21048,7 @@ public final class MasterAdminProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasSnapshot()) { memoizedIsInitialized = 0; return false; @@ -17958,7 +21060,7 @@ public final class MasterAdminProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -17967,12 +21069,12 @@ public final class MasterAdminProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -17982,14 +21084,14 @@ public final class MasterAdminProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -17999,7 +21101,7 @@ public final class MasterAdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotRequest) obj; - + boolean result = true; result = result && (hasSnapshot() == other.hasSnapshot()); if (hasSnapshot()) { @@ -18010,9 +21112,13 @@ public final class MasterAdminProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() 
{ + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasSnapshot()) { @@ -18020,89 +21126,79 @@ public final class MasterAdminProtos { hash = (53 * hash) + getSnapshot().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotRequest parseFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code RestoreSnapshotRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotRequestOrBuilder { @@ -18110,18 +21206,21 @@ public final class MasterAdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_RestoreSnapshotRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_RestoreSnapshotRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_RestoreSnapshotRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -18133,7 +21232,7 @@ public final class MasterAdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (snapshotBuilder_ == null) { @@ -18144,20 +21243,20 @@ public final class MasterAdminProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_RestoreSnapshotRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotRequest result = buildPartial(); if (!result.isInitialized()) { @@ -18165,17 +21264,7 @@ public final class MasterAdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotRequest buildParsed() - throws 
com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotRequest(this); int from_bitField0_ = bitField0_; @@ -18192,7 +21281,7 @@ public final class MasterAdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotRequest)other); @@ -18201,7 +21290,7 @@ public final class MasterAdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotRequest.getDefaultInstance()) return this; if (other.hasSnapshot()) { @@ -18210,7 +21299,7 @@ public final class MasterAdminProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasSnapshot()) { @@ -18222,52 +21311,39 @@ public final class MasterAdminProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.newBuilder(); - if (hasSnapshot()) { - subBuilder.mergeFrom(getSnapshot()); - } - input.readMessage(subBuilder, extensionRegistry); - setSnapshot(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .SnapshotDescription snapshot = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_; + /** + * required .SnapshotDescription snapshot = 1; + */ public boolean hasSnapshot() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .SnapshotDescription snapshot = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { if (snapshotBuilder_ == null) { return snapshot_; @@ -18275,6 +21351,9 @@ public final class MasterAdminProtos { return snapshotBuilder_.getMessage(); } } + /** + * required .SnapshotDescription snapshot = 1; + */ public Builder setSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) { if (snapshotBuilder_ == null) { if (value == null) { @@ -18288,6 +21367,9 @@ public final class MasterAdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .SnapshotDescription snapshot = 1; + */ public Builder setSnapshot( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue) { if (snapshotBuilder_ == null) { @@ -18299,6 +21381,9 @@ public final class MasterAdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .SnapshotDescription snapshot = 1; + */ public Builder mergeSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) { if (snapshotBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -18315,6 +21400,9 @@ public final class MasterAdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .SnapshotDescription snapshot = 1; + */ public Builder clearSnapshot() { if (snapshotBuilder_ == null) { snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); @@ -18325,11 +21413,17 @@ public final class MasterAdminProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * required .SnapshotDescription snapshot = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder getSnapshotBuilder() { bitField0_ |= 0x00000001; onChanged(); return getSnapshotFieldBuilder().getBuilder(); } + /** + * required .SnapshotDescription snapshot = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { if (snapshotBuilder_ != null) { return snapshotBuilder_.getMessageOrBuilder(); @@ -18337,6 +21431,9 @@ public final class MasterAdminProtos { return snapshot_; } } + /** + * required .SnapshotDescription snapshot = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> getSnapshotFieldBuilder() { @@ -18350,84 +21447,145 @@ public final class MasterAdminProtos { } return snapshotBuilder_; } - + // @@protoc_insertion_point(builder_scope:RestoreSnapshotRequest) } - + static { defaultInstance = new RestoreSnapshotRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:RestoreSnapshotRequest) } - + public interface RestoreSnapshotResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code RestoreSnapshotResponse} + */ public static final class RestoreSnapshotResponse extends 
com.google.protobuf.GeneratedMessage implements RestoreSnapshotResponseOrBuilder { // Use RestoreSnapshotResponse.newBuilder() to construct. - private RestoreSnapshotResponse(Builder builder) { + private RestoreSnapshotResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private RestoreSnapshotResponse(boolean noInit) {} - + private RestoreSnapshotResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final RestoreSnapshotResponse defaultInstance; public static RestoreSnapshotResponse getDefaultInstance() { return defaultInstance; } - + public RestoreSnapshotResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private RestoreSnapshotResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_RestoreSnapshotResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_RestoreSnapshotResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_RestoreSnapshotResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public RestoreSnapshotResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RestoreSnapshotResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws 
java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -18437,101 +21595,95 @@ public final class MasterAdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotResponse) obj; - + boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - 
} else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code RestoreSnapshotResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotResponseOrBuilder { @@ -18539,18 +21691,21 @@ public final class MasterAdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_RestoreSnapshotResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_RestoreSnapshotResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_RestoreSnapshotResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -18561,25 +21716,25 @@ public final class MasterAdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_RestoreSnapshotResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotResponse result = buildPartial(); if (!result.isInitialized()) { @@ -18587,23 +21742,13 @@ public final class MasterAdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotResponse(this); onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotResponse)other); @@ -18612,106 +21757,194 @@ public final class MasterAdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - - + // @@protoc_insertion_point(builder_scope:RestoreSnapshotResponse) } - + static { defaultInstance = new 
RestoreSnapshotResponse(true);
       defaultInstance.initFields();
     }
-    
+
     // @@protoc_insertion_point(class_scope:RestoreSnapshotResponse)
   }
-  
+
   public interface IsSnapshotDoneRequestOrBuilder
       extends com.google.protobuf.MessageOrBuilder {
-    
+
     // optional .SnapshotDescription snapshot = 1;
+    /**
+     * optional .SnapshotDescription snapshot = 1;
+     */
     boolean hasSnapshot();
+    /**
+     * optional .SnapshotDescription snapshot = 1;
+     */
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot();
+    /**
+     * optional .SnapshotDescription snapshot = 1;
+     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder();
   }
+  /**
+   * Protobuf type {@code IsSnapshotDoneRequest}
+   *
+   * <pre>
+   * if you don't send the snapshot, then you will get it back
+   * in the response (if the snapshot is done) so you can check the snapshot
+   * </pre>
+ */ public static final class IsSnapshotDoneRequest extends com.google.protobuf.GeneratedMessage implements IsSnapshotDoneRequestOrBuilder { // Use IsSnapshotDoneRequest.newBuilder() to construct. - private IsSnapshotDoneRequest(Builder builder) { + private IsSnapshotDoneRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private IsSnapshotDoneRequest(boolean noInit) {} - + private IsSnapshotDoneRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final IsSnapshotDoneRequest defaultInstance; public static IsSnapshotDoneRequest getDefaultInstance() { return defaultInstance; } - + public IsSnapshotDoneRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private IsSnapshotDoneRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = snapshot_.toBuilder(); + } + snapshot_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(snapshot_); + snapshot_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsSnapshotDoneRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsSnapshotDoneRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsSnapshotDoneRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public IsSnapshotDoneRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return new IsSnapshotDoneRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // optional .SnapshotDescription snapshot = 1; public static final int SNAPSHOT_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_; + /** + * optional .SnapshotDescription snapshot = 1; + */ public boolean hasSnapshot() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional .SnapshotDescription snapshot = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { return snapshot_; } + /** + * optional .SnapshotDescription snapshot = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { return snapshot_; } - + private void initFields() { snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); } @@ -18719,7 +21952,7 @@ public final class MasterAdminProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (hasSnapshot()) { if (!getSnapshot().isInitialized()) { memoizedIsInitialized = 0; @@ -18729,7 +21962,7 @@ public final class MasterAdminProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -18738,12 +21971,12 @@ public final class MasterAdminProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -18753,14 +21986,14 @@ public final class MasterAdminProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -18770,7 +22003,7 @@ public final class MasterAdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneRequest) obj; - + boolean result = true; result = result && (hasSnapshot() == other.hasSnapshot()); if (hasSnapshot()) { @@ -18781,9 +22014,13 @@ public final class MasterAdminProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasSnapshot()) { @@ -18791,89 +22028,84 @@ public final class MasterAdminProtos { hash = (53 * hash) + getSnapshot().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return 
newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneRequest parseFrom(
         com.google.protobuf.ByteString data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneRequest parseFrom(byte[] data)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneRequest parseFrom(
         byte[] data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneRequest parseFrom(java.io.InputStream input)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneRequest parseFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneRequest parseDelimitedFrom(java.io.InputStream input)
         throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneRequest parseDelimitedFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneRequest parseFrom(
         com.google.protobuf.CodedInputStream input)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneRequest parseFrom(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
     }
-    
+
     public static Builder newBuilder() { return Builder.create(); }
     public Builder newBuilderForType() { return newBuilder(); }
     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneRequest prototype) {
       return newBuilder().mergeFrom(prototype);
     }
     public Builder toBuilder() { return newBuilder(this); }
-    
+
@java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code IsSnapshotDoneRequest} + * + *
+     * <pre>
+     * if you don't send the snapshot, then you will get it back
+     * in the response (if the snapshot is done) so you can check the snapshot
+     * </pre>
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneRequestOrBuilder { @@ -18881,18 +22113,21 @@ public final class MasterAdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsSnapshotDoneRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsSnapshotDoneRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsSnapshotDoneRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -18904,7 +22139,7 @@ public final class MasterAdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (snapshotBuilder_ == null) { @@ -18915,20 +22150,20 @@ public final class MasterAdminProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsSnapshotDoneRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneRequest result = buildPartial(); if (!result.isInitialized()) { @@ -18936,17 +22171,7 @@ public final class MasterAdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneRequest(this); int from_bitField0_ = bitField0_; @@ -18963,7 +22188,7 @@ public final class MasterAdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneRequest)other); @@ -18972,7 +22197,7 @@ public final class MasterAdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneRequest.getDefaultInstance()) return this; if (other.hasSnapshot()) { @@ -18981,7 +22206,7 @@ public final class MasterAdminProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (hasSnapshot()) { if (!getSnapshot().isInitialized()) { @@ -18991,52 +22216,39 @@ public final class MasterAdminProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.newBuilder(); - if (hasSnapshot()) { - subBuilder.mergeFrom(getSnapshot()); - } - input.readMessage(subBuilder, extensionRegistry); - setSnapshot(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // optional .SnapshotDescription snapshot = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_; + /** + * optional .SnapshotDescription snapshot = 1; + */ public boolean hasSnapshot() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional .SnapshotDescription snapshot = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { if (snapshotBuilder_ == null) { return snapshot_; @@ -19044,6 +22256,9 @@ public final class MasterAdminProtos { return snapshotBuilder_.getMessage(); } } + /** + * optional .SnapshotDescription snapshot = 1; + */ public Builder 
setSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) { if (snapshotBuilder_ == null) { if (value == null) { @@ -19057,6 +22272,9 @@ public final class MasterAdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * optional .SnapshotDescription snapshot = 1; + */ public Builder setSnapshot( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue) { if (snapshotBuilder_ == null) { @@ -19068,6 +22286,9 @@ public final class MasterAdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * optional .SnapshotDescription snapshot = 1; + */ public Builder mergeSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) { if (snapshotBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -19084,6 +22305,9 @@ public final class MasterAdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * optional .SnapshotDescription snapshot = 1; + */ public Builder clearSnapshot() { if (snapshotBuilder_ == null) { snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); @@ -19094,11 +22318,17 @@ public final class MasterAdminProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * optional .SnapshotDescription snapshot = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder getSnapshotBuilder() { bitField0_ |= 0x00000001; onChanged(); return getSnapshotFieldBuilder().getBuilder(); } + /** + * optional .SnapshotDescription snapshot = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { if (snapshotBuilder_ != null) { return snapshotBuilder_.getMessageOrBuilder(); @@ -19106,6 +22336,9 @@ public final class MasterAdminProtos { return snapshot_; } } + /** + * optional .SnapshotDescription snapshot = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> getSnapshotFieldBuilder() { @@ -19119,82 +22352,192 @@ public final class MasterAdminProtos { } return snapshotBuilder_; } - + // @@protoc_insertion_point(builder_scope:IsSnapshotDoneRequest) } - + static { defaultInstance = new IsSnapshotDoneRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:IsSnapshotDoneRequest) } - + public interface IsSnapshotDoneResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // optional bool done = 1 [default = false]; + /** + * optional bool done = 1 [default = false]; + */ boolean hasDone(); + /** + * optional bool done = 1 [default = false]; + */ boolean getDone(); - + // optional .SnapshotDescription snapshot = 2; + /** + * optional .SnapshotDescription snapshot = 2; + */ boolean hasSnapshot(); + /** + * optional .SnapshotDescription snapshot = 2; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot(); + /** + * optional .SnapshotDescription snapshot = 2; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder(); } + /** + * Protobuf type {@code IsSnapshotDoneResponse} + */ public static final class IsSnapshotDoneResponse extends com.google.protobuf.GeneratedMessage implements IsSnapshotDoneResponseOrBuilder { // 
Use IsSnapshotDoneResponse.newBuilder() to construct. - private IsSnapshotDoneResponse(Builder builder) { + private IsSnapshotDoneResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private IsSnapshotDoneResponse(boolean noInit) {} - + private IsSnapshotDoneResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final IsSnapshotDoneResponse defaultInstance; public static IsSnapshotDoneResponse getDefaultInstance() { return defaultInstance; } - + public IsSnapshotDoneResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private IsSnapshotDoneResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + done_ = input.readBool(); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder subBuilder = null; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + subBuilder = snapshot_.toBuilder(); + } + snapshot_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(snapshot_); + snapshot_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000002; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsSnapshotDoneResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsSnapshotDoneResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsSnapshotDoneResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public IsSnapshotDoneResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + 
return new IsSnapshotDoneResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // optional bool done = 1 [default = false]; public static final int DONE_FIELD_NUMBER = 1; private boolean done_; + /** + * optional bool done = 1 [default = false]; + */ public boolean hasDone() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional bool done = 1 [default = false]; + */ public boolean getDone() { return done_; } - + // optional .SnapshotDescription snapshot = 2; public static final int SNAPSHOT_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_; + /** + * optional .SnapshotDescription snapshot = 2; + */ public boolean hasSnapshot() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional .SnapshotDescription snapshot = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { return snapshot_; } + /** + * optional .SnapshotDescription snapshot = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { return snapshot_; } - + private void initFields() { done_ = false; snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); @@ -19203,7 +22546,7 @@ public final class MasterAdminProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (hasSnapshot()) { if (!getSnapshot().isInitialized()) { memoizedIsInitialized = 0; @@ -19213,7 +22556,7 @@ public final class MasterAdminProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -19225,12 +22568,12 @@ public final class MasterAdminProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -19244,14 +22587,14 @@ public final class MasterAdminProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -19261,7 +22604,7 @@ public final class MasterAdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneResponse) obj; - + boolean result = true; result = result && (hasDone() == other.hasDone()); if (hasDone()) { @@ -19277,9 +22620,13 @@ public final class MasterAdminProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasDone()) { @@ -19291,89 +22638,79 @@ public final class MasterAdminProtos { hash = (53 * hash) + getSnapshot().hashCode(); } hash = (29 * hash) + 
getUnknownFields().hashCode();
+      memoizedHashCode = hash;
       return hash;
     }
-    
+
     public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneResponse parseFrom(
         com.google.protobuf.ByteString data)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneResponse parseFrom(
         com.google.protobuf.ByteString data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneResponse parseFrom(byte[] data)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneResponse parseFrom(
         byte[] data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneResponse parseFrom(java.io.InputStream input)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneResponse parseFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneResponse parseDelimitedFrom(java.io.InputStream input)
         throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneResponse parseDelimitedFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneResponse parseFrom(
         com.google.protobuf.CodedInputStream input)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneResponse parseFrom(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
     }
-    
+
     public static Builder newBuilder() { return Builder.create(); }
     public 
Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code IsSnapshotDoneResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneResponseOrBuilder { @@ -19381,18 +22718,21 @@ public final class MasterAdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsSnapshotDoneResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsSnapshotDoneResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsSnapshotDoneResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -19404,7 +22744,7 @@ public final class MasterAdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); done_ = false; @@ -19417,20 +22757,20 @@ public final class MasterAdminProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsSnapshotDoneResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneResponse result = buildPartial(); if (!result.isInitialized()) { @@ -19438,17 +22778,7 @@ public final class MasterAdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - 
+ public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneResponse(this); int from_bitField0_ = bitField0_; @@ -19469,7 +22799,7 @@ public final class MasterAdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneResponse)other); @@ -19478,7 +22808,7 @@ public final class MasterAdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneResponse.getDefaultInstance()) return this; if (other.hasDone()) { @@ -19490,7 +22820,7 @@ public final class MasterAdminProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (hasSnapshot()) { if (!getSnapshot().isInitialized()) { @@ -19500,78 +22830,72 @@ public final class MasterAdminProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - done_ = input.readBool(); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.newBuilder(); - if (hasSnapshot()) { - subBuilder.mergeFrom(getSnapshot()); - } - input.readMessage(subBuilder, extensionRegistry); - setSnapshot(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // optional bool done = 1 [default = false]; private boolean done_ ; + /** + * optional bool done = 1 [default = false]; + */ public boolean hasDone() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional bool done = 1 [default = false]; + */ public boolean getDone() { return done_; } + /** + * optional bool done = 1 [default = false]; + */ public Builder setDone(boolean value) { bitField0_ |= 0x00000001; done_ = value; onChanged(); return this; } + /** + * optional bool done = 1 [default = false]; + */ public Builder clearDone() { bitField0_ 
= (bitField0_ & ~0x00000001); done_ = false; onChanged(); return this; } - + // optional .SnapshotDescription snapshot = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_; + /** + * optional .SnapshotDescription snapshot = 2; + */ public boolean hasSnapshot() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional .SnapshotDescription snapshot = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { if (snapshotBuilder_ == null) { return snapshot_; @@ -19579,6 +22903,9 @@ public final class MasterAdminProtos { return snapshotBuilder_.getMessage(); } } + /** + * optional .SnapshotDescription snapshot = 2; + */ public Builder setSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) { if (snapshotBuilder_ == null) { if (value == null) { @@ -19592,6 +22919,9 @@ public final class MasterAdminProtos { bitField0_ |= 0x00000002; return this; } + /** + * optional .SnapshotDescription snapshot = 2; + */ public Builder setSnapshot( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue) { if (snapshotBuilder_ == null) { @@ -19603,6 +22933,9 @@ public final class MasterAdminProtos { bitField0_ |= 0x00000002; return this; } + /** + * optional .SnapshotDescription snapshot = 2; + */ public Builder mergeSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) { if (snapshotBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && @@ -19619,6 +22952,9 @@ public final class MasterAdminProtos { bitField0_ |= 0x00000002; return this; } + /** + * optional .SnapshotDescription snapshot = 2; + */ public Builder clearSnapshot() { if (snapshotBuilder_ == null) { snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); @@ -19629,11 +22965,17 @@ public final class MasterAdminProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } + /** + * optional .SnapshotDescription snapshot = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder getSnapshotBuilder() { bitField0_ |= 0x00000002; onChanged(); return getSnapshotFieldBuilder().getBuilder(); } + /** + * optional .SnapshotDescription snapshot = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { if (snapshotBuilder_ != null) { return snapshotBuilder_.getMessageOrBuilder(); @@ -19641,6 +22983,9 @@ public final class MasterAdminProtos { return snapshot_; } } + /** + * optional .SnapshotDescription snapshot = 2; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> getSnapshotFieldBuilder() { @@ -19654,68 +22999,161 @@ public final class MasterAdminProtos { } return snapshotBuilder_; } - + // 
@@protoc_insertion_point(builder_scope:IsSnapshotDoneResponse) } - + static { defaultInstance = new IsSnapshotDoneResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:IsSnapshotDoneResponse) } - + public interface IsRestoreSnapshotDoneRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // optional .SnapshotDescription snapshot = 1; + /** + * optional .SnapshotDescription snapshot = 1; + */ boolean hasSnapshot(); + /** + * optional .SnapshotDescription snapshot = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot(); + /** + * optional .SnapshotDescription snapshot = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder(); } + /** + * Protobuf type {@code IsRestoreSnapshotDoneRequest} + */ public static final class IsRestoreSnapshotDoneRequest extends com.google.protobuf.GeneratedMessage implements IsRestoreSnapshotDoneRequestOrBuilder { // Use IsRestoreSnapshotDoneRequest.newBuilder() to construct. - private IsRestoreSnapshotDoneRequest(Builder builder) { + private IsRestoreSnapshotDoneRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private IsRestoreSnapshotDoneRequest(boolean noInit) {} - + private IsRestoreSnapshotDoneRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final IsRestoreSnapshotDoneRequest defaultInstance; public static IsRestoreSnapshotDoneRequest getDefaultInstance() { return defaultInstance; } - + public IsRestoreSnapshotDoneRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private IsRestoreSnapshotDoneRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = snapshot_.toBuilder(); + } + snapshot_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(snapshot_); + snapshot_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsRestoreSnapshotDoneRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsRestoreSnapshotDoneRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsRestoreSnapshotDoneRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public IsRestoreSnapshotDoneRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new IsRestoreSnapshotDoneRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // optional .SnapshotDescription snapshot = 1; public static final int SNAPSHOT_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_; + /** + * optional .SnapshotDescription snapshot = 1; + */ public boolean hasSnapshot() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional .SnapshotDescription snapshot = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { return snapshot_; } + /** + * optional .SnapshotDescription snapshot = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { return snapshot_; } - + private void initFields() { snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); } @@ -19723,7 +23161,7 @@ public final class MasterAdminProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (hasSnapshot()) { if (!getSnapshot().isInitialized()) { memoizedIsInitialized = 0; @@ -19733,7 +23171,7 @@ public final class MasterAdminProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -19742,12 +23180,12 @@ public final class MasterAdminProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -19757,14 +23195,14 @@ public final class MasterAdminProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -19774,7 +23212,7 @@ public final class MasterAdminProtos { return super.equals(obj); } 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneRequest) obj; - + boolean result = true; result = result && (hasSnapshot() == other.hasSnapshot()); if (hasSnapshot()) { @@ -19785,9 +23223,13 @@ public final class MasterAdminProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasSnapshot()) { @@ -19795,89 +23237,79 @@ public final class MasterAdminProtos { hash = (53 * hash) + getSnapshot().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, 
extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code IsRestoreSnapshotDoneRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneRequestOrBuilder { @@ -19885,18 +23317,21 @@ public final class MasterAdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsRestoreSnapshotDoneRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsRestoreSnapshotDoneRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsRestoreSnapshotDoneRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -19908,7 +23343,7 @@ public final class MasterAdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (snapshotBuilder_ == null) { @@ -19919,20 +23354,20 @@ public final class MasterAdminProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsRestoreSnapshotDoneRequest_descriptor; } - + public 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneRequest result = buildPartial(); if (!result.isInitialized()) { @@ -19940,17 +23375,7 @@ public final class MasterAdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneRequest(this); int from_bitField0_ = bitField0_; @@ -19967,7 +23392,7 @@ public final class MasterAdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneRequest)other); @@ -19976,7 +23401,7 @@ public final class MasterAdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneRequest.getDefaultInstance()) return this; if (other.hasSnapshot()) { @@ -19985,7 +23410,7 @@ public final class MasterAdminProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (hasSnapshot()) { if (!getSnapshot().isInitialized()) { @@ -19995,52 +23420,39 @@ public final class MasterAdminProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.newBuilder(); - if (hasSnapshot()) { - subBuilder.mergeFrom(getSnapshot()); - } - input.readMessage(subBuilder, extensionRegistry); - setSnapshot(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneRequest 
parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // optional .SnapshotDescription snapshot = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_; + /** + * optional .SnapshotDescription snapshot = 1; + */ public boolean hasSnapshot() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional .SnapshotDescription snapshot = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { if (snapshotBuilder_ == null) { return snapshot_; @@ -20048,6 +23460,9 @@ public final class MasterAdminProtos { return snapshotBuilder_.getMessage(); } } + /** + * optional .SnapshotDescription snapshot = 1; + */ public Builder setSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) { if (snapshotBuilder_ == null) { if (value == null) { @@ -20061,6 +23476,9 @@ public final class MasterAdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * optional .SnapshotDescription snapshot = 1; + */ public Builder setSnapshot( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue) { if (snapshotBuilder_ == null) { @@ -20072,6 +23490,9 @@ public final class MasterAdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * optional .SnapshotDescription snapshot = 1; + */ public Builder mergeSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) { if (snapshotBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -20088,6 +23509,9 @@ public final class MasterAdminProtos { bitField0_ |= 0x00000001; return this; } + /** + * optional .SnapshotDescription snapshot = 1; + */ public Builder clearSnapshot() { if (snapshotBuilder_ == null) { snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); @@ -20098,11 +23522,17 @@ public final class MasterAdminProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * optional .SnapshotDescription snapshot = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder getSnapshotBuilder() { bitField0_ |= 0x00000001; onChanged(); return getSnapshotFieldBuilder().getBuilder(); } + /** + * optional .SnapshotDescription snapshot = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { if (snapshotBuilder_ != null) { return snapshotBuilder_.getMessageOrBuilder(); @@ -20110,6 +23540,9 @@ public final class MasterAdminProtos { return snapshot_; } } + /** + * optional .SnapshotDescription snapshot = 1; + */ private com.google.protobuf.SingleFieldBuilder< 
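// [editor's note -- illustrative sketch, not part of the generated diff]
// The hunk above collapses the Builder's hand-rolled tag switch into a single
// PARSER.parsePartialFrom() call; when parsing fails, the protobuf 2.5
// InvalidProtocolBufferException carries whatever was decoded before the
// failure, and the finally block merges it back into the builder. A sketch of
// the same recovery from calling code (whether a usable partial message comes
// back depends on where the input broke):
import com.google.protobuf.InvalidProtocolBufferException;

import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneRequest;

final class UnfinishedMessageSketch {
  static IsRestoreSnapshotDoneRequest parseOrPartial(byte[] data) {
    try {
      return IsRestoreSnapshotDoneRequest.PARSER.parseFrom(data);
    } catch (InvalidProtocolBufferException e) {
      // New in protobuf 2.5: the exception exposes the partially parsed
      // message (may be null if nothing was decoded).
      return (IsRestoreSnapshotDoneRequest) e.getUnfinishedMessage();
    }
  }
}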
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> getSnapshotFieldBuilder() { @@ -20123,64 +23556,143 @@ public final class MasterAdminProtos { } return snapshotBuilder_; } - + // @@protoc_insertion_point(builder_scope:IsRestoreSnapshotDoneRequest) } - + static { defaultInstance = new IsRestoreSnapshotDoneRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:IsRestoreSnapshotDoneRequest) } - + public interface IsRestoreSnapshotDoneResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // optional bool done = 1 [default = true]; + /** + * optional bool done = 1 [default = true]; + */ boolean hasDone(); + /** + * optional bool done = 1 [default = true]; + */ boolean getDone(); } + /** + * Protobuf type {@code IsRestoreSnapshotDoneResponse} + */ public static final class IsRestoreSnapshotDoneResponse extends com.google.protobuf.GeneratedMessage implements IsRestoreSnapshotDoneResponseOrBuilder { // Use IsRestoreSnapshotDoneResponse.newBuilder() to construct. - private IsRestoreSnapshotDoneResponse(Builder builder) { + private IsRestoreSnapshotDoneResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private IsRestoreSnapshotDoneResponse(boolean noInit) {} - + private IsRestoreSnapshotDoneResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final IsRestoreSnapshotDoneResponse defaultInstance; public static IsRestoreSnapshotDoneResponse getDefaultInstance() { return defaultInstance; } - + public IsRestoreSnapshotDoneResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private IsRestoreSnapshotDoneResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + done_ = input.readBool(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsRestoreSnapshotDoneResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsRestoreSnapshotDoneResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsRestoreSnapshotDoneResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public IsRestoreSnapshotDoneResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new IsRestoreSnapshotDoneResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // optional bool done = 1 [default = true]; public static final int DONE_FIELD_NUMBER = 1; private boolean done_; + /** + * optional bool done = 1 [default = true]; + */ public boolean hasDone() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional bool done = 1 [default = true]; + */ public boolean getDone() { return done_; } - + private void initFields() { done_ = true; } @@ -20188,11 +23700,11 @@ public final class MasterAdminProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -20201,12 +23713,12 @@ public final class MasterAdminProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -20216,14 +23728,14 @@ public final class MasterAdminProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -20233,7 +23745,7 @@ public final class MasterAdminProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneResponse) obj; - + boolean result = true; result = result && (hasDone() == other.hasDone()); if (hasDone()) { @@ -20244,9 +23756,13 @@ public final class MasterAdminProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasDone()) { @@ -20254,89 +23770,79 @@ public final class MasterAdminProtos { hash = (53 * hash) + hashBoolean(getDone()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static 
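// [editor's note -- illustrative sketch, not part of the generated diff]
// The hunk above gives the message a parse-time constructor that reads
// directly from a CodedInputStream (keeping unknown fields on the message
// itself) and a static PARSER, which getParserForType() exposes. That makes
// generic parse helpers possible without naming the concrete class; a sketch
// under those protobuf 2.5 assumptions:
import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.Message;
import com.google.protobuf.Parser;

final class GenericParseSketch {
  // Parse bytes as the same type as the given prototype message.
  @SuppressWarnings("unchecked")
  static <T extends Message> T reparse(T prototype, byte[] data)
      throws InvalidProtocolBufferException {
    Parser<? extends Message> parser = prototype.getParserForType();
    return (T) parser.parseFrom(data);
  }
}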
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder 
newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code IsRestoreSnapshotDoneResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneResponseOrBuilder { @@ -20344,18 +23850,21 @@ public final class MasterAdminProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsRestoreSnapshotDoneResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsRestoreSnapshotDoneResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsRestoreSnapshotDoneResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -20366,27 +23875,27 @@ public final class MasterAdminProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); done_ = true; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsRestoreSnapshotDoneResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneResponse result = buildPartial(); if (!result.isInitialized()) { @@ -20394,17 +23903,7 @@ public final class MasterAdminProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - 
result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneResponse(this); int from_bitField0_ = bitField0_; @@ -20417,7 +23916,7 @@ public final class MasterAdminProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneResponse)other); @@ -20426,7 +23925,7 @@ public final class MasterAdminProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneResponse.getDefaultInstance()) return this; if (other.hasDone()) { @@ -20435,219 +23934,434 @@ public final class MasterAdminProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - done_ = input.readBool(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // optional bool done = 1 [default = true]; private boolean done_ = true; + /** + * optional bool done = 1 [default = true]; + */ public boolean hasDone() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional bool done = 1 [default = true]; + */ public boolean getDone() { return done_; } + /** + * optional bool done = 1 [default = true]; + */ public Builder setDone(boolean value) { bitField0_ |= 0x00000001; done_ = value; onChanged(); return this; } + /** + * optional bool done = 1 [default = true]; + */ public Builder clearDone() { bitField0_ = (bitField0_ & ~0x00000001); done_ = true; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:IsRestoreSnapshotDoneResponse) } - + static { defaultInstance = new IsRestoreSnapshotDoneResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:IsRestoreSnapshotDoneResponse) } - + + /** + * Protobuf 
service {@code MasterAdminService} + */ public static abstract class MasterAdminService implements com.google.protobuf.Service { protected MasterAdminService() {} - + public interface Interface { + /** + * rpc addColumn(.AddColumnRequest) returns (.AddColumnResponse); + * + *
+       ** Adds a column to the specified table. 
+       * 
+ */ public abstract void addColumn( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc deleteColumn(.DeleteColumnRequest) returns (.DeleteColumnResponse); + * + *
+       ** Deletes a column from the specified table. Table must be disabled. 
+       * 
+ */ public abstract void deleteColumn( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc modifyColumn(.ModifyColumnRequest) returns (.ModifyColumnResponse); + * + *
+       ** Modifies an existing column on the specified table. 
+       * 
+ */ public abstract void modifyColumn( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc moveRegion(.MoveRegionRequest) returns (.MoveRegionResponse); + * + *
+       ** Move the region to the destination server. 
+       * 
+ */ public abstract void moveRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc dispatchMergingRegions(.DispatchMergingRegionsRequest) returns (.DispatchMergingRegionsResponse); + * + *
+       ** Master dispatches merging of the regions 
+       * 
+ */ public abstract void dispatchMergingRegions( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc assignRegion(.AssignRegionRequest) returns (.AssignRegionResponse); + * + *
+       ** Assign a region to a server chosen at random. 
+       * 
+ */ public abstract void assignRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc unassignRegion(.UnassignRegionRequest) returns (.UnassignRegionResponse); + * + *
+       **
+       * Unassign a region from the current hosting regionserver.  Region will then be
+       * assigned to a regionserver chosen at random.  Region could be reassigned
+       * back to the same server.  Use moveRegion if you want
+       * to control the region movement.
+       * 
+ */ public abstract void unassignRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc offlineRegion(.OfflineRegionRequest) returns (.OfflineRegionResponse); + * + *
+       **
+       * Offline a region from the assignment manager's in-memory state.  The
+       * region should be in a closed state and there will be no attempt to
+       * automatically reassign the region as in unassign.   This is a special
+       * method, and should only be used by experts or hbck.
+       * 
+ */ public abstract void offlineRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc deleteTable(.DeleteTableRequest) returns (.DeleteTableResponse); + * + *
+       ** Deletes a table 
+       * 
+ */ public abstract void deleteTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc enableTable(.EnableTableRequest) returns (.EnableTableResponse); + * + *
+       ** Puts the table on-line (only needed if the table has been previously taken offline) 
+       * 
+ */ public abstract void enableTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc disableTable(.DisableTableRequest) returns (.DisableTableResponse); + * + *
+       ** Take table offline 
+       * 
+ */ public abstract void disableTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc modifyTable(.ModifyTableRequest) returns (.ModifyTableResponse); + * + *
+       ** Modify a table's metadata 
+       * 
+ */ public abstract void modifyTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc createTable(.CreateTableRequest) returns (.CreateTableResponse); + * + *
+       ** Creates a new table asynchronously 
+       * 
+ */ public abstract void createTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc shutdown(.ShutdownRequest) returns (.ShutdownResponse); + * + *
+       ** Shut down an HBase cluster. 
+       * 
+ */ public abstract void shutdown( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc stopMaster(.StopMasterRequest) returns (.StopMasterResponse); + * + *
+       ** Stop HBase Master only.  Does not shut down the cluster. 
+       * 
+ */ public abstract void stopMaster( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc balance(.BalanceRequest) returns (.BalanceResponse); + * + *
+       **
+       * Run the balancer.  If there are regions to move, it will go ahead and
+       * do the reassignments.  It may refuse to run for various reasons;
+       * check the logs.
+       * 
+ */ public abstract void balance( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc setBalancerRunning(.SetBalancerRunningRequest) returns (.SetBalancerRunningResponse); + * + *
+       **
+       * Turn the load balancer on or off.
+       * If synchronous is true, it waits for the current balance() call, if one is outstanding, to return.
+       * 
+ */ public abstract void setBalancerRunning( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc runCatalogScan(.CatalogScanRequest) returns (.CatalogScanResponse); + * + *
+       ** Trigger a run of the catalog janitor 
+       * 
+ */ public abstract void runCatalogScan( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc enableCatalogJanitor(.EnableCatalogJanitorRequest) returns (.EnableCatalogJanitorResponse); + * + *
+       **
+       * Turn the catalog janitor on or off.
+       * 
+ */ public abstract void enableCatalogJanitor( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc isCatalogJanitorEnabled(.IsCatalogJanitorEnabledRequest) returns (.IsCatalogJanitorEnabledResponse); + * + *
+       **
+       * Query whether the catalog janitor is enabled.
+       * 
+ */ public abstract void isCatalogJanitorEnabled( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc execMasterService(.CoprocessorServiceRequest) returns (.CoprocessorServiceResponse); + * + *
+       **
+       * Call a master coprocessor endpoint
+       * 
+ */ public abstract void execMasterService( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc snapshot(.TakeSnapshotRequest) returns (.TakeSnapshotResponse); + * + *
+       ** 
+       * Create a snapshot for the given table.
+       * @param snapshot description of the snapshot to take
+       * 
+ */ public abstract void snapshot( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc getCompletedSnapshots(.ListSnapshotRequest) returns (.ListSnapshotResponse); + * + *
+       **
+       * List completed snapshots.
+       * @return a list of snapshot descriptors for completed snapshots
+       * 
+ */ public abstract void getCompletedSnapshots( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc deleteSnapshot(.DeleteSnapshotRequest) returns (.DeleteSnapshotResponse); + * + *
+       **
+       * Delete an existing snapshot. This method can also be used to clean up an aborted snapshot.
+       * @param snapshotName snapshot to delete
+       * 
+ */ public abstract void deleteSnapshot( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc isSnapshotDone(.IsSnapshotDoneRequest) returns (.IsSnapshotDoneResponse); + * + *
+       **
+       * Determine if the snapshot is done yet.
+       * 
+ */ public abstract void isSnapshotDone( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc restoreSnapshot(.RestoreSnapshotRequest) returns (.RestoreSnapshotResponse); + * + *
+       **
+       * Restore a snapshot
+       * @param snapshot description of the snapshot to restore
+       * 
+ */ public abstract void restoreSnapshot( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc isRestoreSnapshotDone(.IsRestoreSnapshotDoneRequest) returns (.IsRestoreSnapshotDoneResponse); + * + *
+       **
+       * Determine if the snapshot restore is done yet.
+       * 
+ */ public abstract void isRestoreSnapshotDone( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneRequest request, com.google.protobuf.RpcCallback done); - + } - + public static com.google.protobuf.Service newReflectiveService( final Interface impl) { return new MasterAdminService() { @@ -20658,7 +24372,7 @@ public final class MasterAdminProtos { com.google.protobuf.RpcCallback done) { impl.addColumn(controller, request, done); } - + @java.lang.Override public void deleteColumn( com.google.protobuf.RpcController controller, @@ -20666,7 +24380,7 @@ public final class MasterAdminProtos { com.google.protobuf.RpcCallback done) { impl.deleteColumn(controller, request, done); } - + @java.lang.Override public void modifyColumn( com.google.protobuf.RpcController controller, @@ -20674,7 +24388,7 @@ public final class MasterAdminProtos { com.google.protobuf.RpcCallback done) { impl.modifyColumn(controller, request, done); } - + @java.lang.Override public void moveRegion( com.google.protobuf.RpcController controller, @@ -20682,7 +24396,7 @@ public final class MasterAdminProtos { com.google.protobuf.RpcCallback done) { impl.moveRegion(controller, request, done); } - + @java.lang.Override public void dispatchMergingRegions( com.google.protobuf.RpcController controller, @@ -20690,7 +24404,7 @@ public final class MasterAdminProtos { com.google.protobuf.RpcCallback done) { impl.dispatchMergingRegions(controller, request, done); } - + @java.lang.Override public void assignRegion( com.google.protobuf.RpcController controller, @@ -20698,7 +24412,7 @@ public final class MasterAdminProtos { com.google.protobuf.RpcCallback done) { impl.assignRegion(controller, request, done); } - + @java.lang.Override public void unassignRegion( com.google.protobuf.RpcController controller, @@ -20706,7 +24420,7 @@ public final class MasterAdminProtos { com.google.protobuf.RpcCallback done) { impl.unassignRegion(controller, request, done); } - + @java.lang.Override public void offlineRegion( com.google.protobuf.RpcController controller, @@ -20714,7 +24428,7 @@ public final class MasterAdminProtos { com.google.protobuf.RpcCallback done) { impl.offlineRegion(controller, request, done); } - + @java.lang.Override public void deleteTable( com.google.protobuf.RpcController controller, @@ -20722,7 +24436,7 @@ public final class MasterAdminProtos { com.google.protobuf.RpcCallback done) { impl.deleteTable(controller, request, done); } - + @java.lang.Override public void enableTable( com.google.protobuf.RpcController controller, @@ -20730,7 +24444,7 @@ public final class MasterAdminProtos { com.google.protobuf.RpcCallback done) { impl.enableTable(controller, request, done); } - + @java.lang.Override public void disableTable( com.google.protobuf.RpcController controller, @@ -20738,7 +24452,7 @@ public final class MasterAdminProtos { com.google.protobuf.RpcCallback done) { impl.disableTable(controller, request, done); } - + @java.lang.Override public void modifyTable( com.google.protobuf.RpcController controller, @@ -20746,7 +24460,7 @@ public final class MasterAdminProtos { com.google.protobuf.RpcCallback done) { impl.modifyTable(controller, request, done); } - + @java.lang.Override public void createTable( com.google.protobuf.RpcController controller, @@ -20754,7 +24468,7 @@ public final class MasterAdminProtos { com.google.protobuf.RpcCallback done) { impl.createTable(controller, request, done); } - + @java.lang.Override public void shutdown( 
com.google.protobuf.RpcController controller, @@ -20762,7 +24476,7 @@ public final class MasterAdminProtos { com.google.protobuf.RpcCallback done) { impl.shutdown(controller, request, done); } - + @java.lang.Override public void stopMaster( com.google.protobuf.RpcController controller, @@ -20770,7 +24484,7 @@ public final class MasterAdminProtos { com.google.protobuf.RpcCallback done) { impl.stopMaster(controller, request, done); } - + @java.lang.Override public void balance( com.google.protobuf.RpcController controller, @@ -20778,7 +24492,7 @@ public final class MasterAdminProtos { com.google.protobuf.RpcCallback done) { impl.balance(controller, request, done); } - + @java.lang.Override public void setBalancerRunning( com.google.protobuf.RpcController controller, @@ -20786,7 +24500,7 @@ public final class MasterAdminProtos { com.google.protobuf.RpcCallback done) { impl.setBalancerRunning(controller, request, done); } - + @java.lang.Override public void runCatalogScan( com.google.protobuf.RpcController controller, @@ -20794,7 +24508,7 @@ public final class MasterAdminProtos { com.google.protobuf.RpcCallback done) { impl.runCatalogScan(controller, request, done); } - + @java.lang.Override public void enableCatalogJanitor( com.google.protobuf.RpcController controller, @@ -20802,7 +24516,7 @@ public final class MasterAdminProtos { com.google.protobuf.RpcCallback done) { impl.enableCatalogJanitor(controller, request, done); } - + @java.lang.Override public void isCatalogJanitorEnabled( com.google.protobuf.RpcController controller, @@ -20810,7 +24524,7 @@ public final class MasterAdminProtos { com.google.protobuf.RpcCallback done) { impl.isCatalogJanitorEnabled(controller, request, done); } - + @java.lang.Override public void execMasterService( com.google.protobuf.RpcController controller, @@ -20818,7 +24532,7 @@ public final class MasterAdminProtos { com.google.protobuf.RpcCallback done) { impl.execMasterService(controller, request, done); } - + @java.lang.Override public void snapshot( com.google.protobuf.RpcController controller, @@ -20826,7 +24540,7 @@ public final class MasterAdminProtos { com.google.protobuf.RpcCallback done) { impl.snapshot(controller, request, done); } - + @java.lang.Override public void getCompletedSnapshots( com.google.protobuf.RpcController controller, @@ -20834,7 +24548,7 @@ public final class MasterAdminProtos { com.google.protobuf.RpcCallback done) { impl.getCompletedSnapshots(controller, request, done); } - + @java.lang.Override public void deleteSnapshot( com.google.protobuf.RpcController controller, @@ -20842,7 +24556,7 @@ public final class MasterAdminProtos { com.google.protobuf.RpcCallback done) { impl.deleteSnapshot(controller, request, done); } - + @java.lang.Override public void isSnapshotDone( com.google.protobuf.RpcController controller, @@ -20850,7 +24564,7 @@ public final class MasterAdminProtos { com.google.protobuf.RpcCallback done) { impl.isSnapshotDone(controller, request, done); } - + @java.lang.Override public void restoreSnapshot( com.google.protobuf.RpcController controller, @@ -20858,7 +24572,7 @@ public final class MasterAdminProtos { com.google.protobuf.RpcCallback done) { impl.restoreSnapshot(controller, request, done); } - + @java.lang.Override public void isRestoreSnapshotDone( com.google.protobuf.RpcController controller, @@ -20866,10 +24580,10 @@ public final class MasterAdminProtos { com.google.protobuf.RpcCallback done) { impl.isRestoreSnapshotDone(controller, request, done); } - + }; } - + public static 
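// [editor's note -- illustrative sketch, not part of the generated diff]
// newReflectiveService() above wraps a user-supplied Interface implementation
// in a com.google.protobuf.Service whose callMethod() dispatches on the
// protobuf MethodDescriptor. A sketch of driving such a service generically
// (the service instance and method name are assumptions of the example):
import com.google.protobuf.Descriptors;
import com.google.protobuf.Message;
import com.google.protobuf.RpcCallback;
import com.google.protobuf.Service;

final class ServiceDispatchSketch {
  static void call(Service service, String methodName, Message request,
      RpcCallback<Message> done) {
    Descriptors.MethodDescriptor method =
        service.getDescriptorForType().findMethodByName(methodName);
    if (method == null) {
      throw new IllegalArgumentException("no such rpc: " + methodName);
    }
    // Controller omitted (null) for brevity; a real RPC layer supplies one.
    service.callMethod(method, null, request, done);
  }
}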
com.google.protobuf.BlockingService newReflectiveBlockingService(final BlockingInterface impl) { return new com.google.protobuf.BlockingService() { @@ -20877,7 +24591,7 @@ public final class MasterAdminProtos { getDescriptorForType() { return getDescriptor(); } - + public final com.google.protobuf.Message callBlockingMethod( com.google.protobuf.Descriptors.MethodDescriptor method, com.google.protobuf.RpcController controller, @@ -20947,7 +24661,7 @@ public final class MasterAdminProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getRequestPrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -21015,7 +24729,7 @@ public final class MasterAdminProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getResponsePrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -21083,145 +24797,360 @@ public final class MasterAdminProtos { throw new java.lang.AssertionError("Can't get here."); } } - + }; } - + + /** + * rpc addColumn(.AddColumnRequest) returns (.AddColumnResponse); + * + *
+     ** Adds a column to the specified table. 
+     * 
+ */ public abstract void addColumn( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc deleteColumn(.DeleteColumnRequest) returns (.DeleteColumnResponse); + * + *
+     ** Deletes a column from the specified table. Table must be disabled. 
+     * 
+ */ public abstract void deleteColumn( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc modifyColumn(.ModifyColumnRequest) returns (.ModifyColumnResponse); + * + *
+     ** Modifies an existing column on the specified table. 
+     * 
+ */ public abstract void modifyColumn( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc moveRegion(.MoveRegionRequest) returns (.MoveRegionResponse); + * + *
+     ** Move the region to the destination server. 
+     * 
+ */ public abstract void moveRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc dispatchMergingRegions(.DispatchMergingRegionsRequest) returns (.DispatchMergingRegionsResponse); + * + *
+     ** Master dispatches merging of the regions 
+     * 
+ */ public abstract void dispatchMergingRegions( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc assignRegion(.AssignRegionRequest) returns (.AssignRegionResponse); + * + *
+     ** Assign a region to a server chosen at random. 
+     * 
+ */ public abstract void assignRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc unassignRegion(.UnassignRegionRequest) returns (.UnassignRegionResponse); + * + *
+     **
+     * Unassign a region from the current hosting regionserver.  Region will then be
+     * assigned to a regionserver chosen at random.  Region could be reassigned
+     * back to the same server.  Use moveRegion if you want
+     * to control the region movement.
+     * 
+ */ public abstract void unassignRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc offlineRegion(.OfflineRegionRequest) returns (.OfflineRegionResponse); + * + *
+     **
+     * Offline a region from the assignment manager's in-memory state.  The
+     * region should be in a closed state and there will be no attempt to
+     * automatically reassign the region as in unassign.   This is a special
+     * method, and should only be used by experts or hbck.
+     * 
+ */ public abstract void offlineRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc deleteTable(.DeleteTableRequest) returns (.DeleteTableResponse); + * + *
+     ** Deletes a table 
+     * 
+ */ public abstract void deleteTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc enableTable(.EnableTableRequest) returns (.EnableTableResponse); + * + *
+     ** Puts the table on-line (only needed if the table has been previously taken offline) 
+     * 
+ */ public abstract void enableTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc disableTable(.DisableTableRequest) returns (.DisableTableResponse); + * + *
+     ** Take table offline 
+     * 
+ */ public abstract void disableTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc modifyTable(.ModifyTableRequest) returns (.ModifyTableResponse); + * + *
+     ** Modify a table's metadata 
+     * 
+ */ public abstract void modifyTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc createTable(.CreateTableRequest) returns (.CreateTableResponse); + * + *
+     ** Creates a new table asynchronously 
+     * 
+ */ public abstract void createTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc shutdown(.ShutdownRequest) returns (.ShutdownResponse); + * + *
+     ** Shut down an HBase cluster. 
+     * 
+ */ public abstract void shutdown( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc stopMaster(.StopMasterRequest) returns (.StopMasterResponse); + * + *
+     ** Stop HBase Master only.  Does not shut down the cluster. 
+     * 
+ */ public abstract void stopMaster( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc balance(.BalanceRequest) returns (.BalanceResponse); + * + *
+     **
+     * Run the balancer.  If there are regions to move, it will go ahead and
+     * do the reassignments.  It may refuse to run for various reasons;
+     * check the logs.
+     * 
+ */ public abstract void balance( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc setBalancerRunning(.SetBalancerRunningRequest) returns (.SetBalancerRunningResponse); + * + *
+     **
+     * Turn the load balancer on or off.
+     * If synchronous is true, it waits for the current balance() call, if one is outstanding, to return.
+     * 
+ */ public abstract void setBalancerRunning( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc runCatalogScan(.CatalogScanRequest) returns (.CatalogScanResponse); + * + *
+     ** Trigger a run of the catalog janitor 
+     * 
+ */ public abstract void runCatalogScan( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc enableCatalogJanitor(.EnableCatalogJanitorRequest) returns (.EnableCatalogJanitorResponse); + * + *
+     **
+     * Turn the catalog janitor on or off.
+     * 
+ */ public abstract void enableCatalogJanitor( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc isCatalogJanitorEnabled(.IsCatalogJanitorEnabledRequest) returns (.IsCatalogJanitorEnabledResponse); + * + *
+     **
+     * Query whether the catalog janitor is enabled.
+     * 
+ */ public abstract void isCatalogJanitorEnabled( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc execMasterService(.CoprocessorServiceRequest) returns (.CoprocessorServiceResponse); + * + *
+     **
+     * Call a master coprocessor endpoint
+     * 
+ */ public abstract void execMasterService( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc snapshot(.TakeSnapshotRequest) returns (.TakeSnapshotResponse); + * + *
+     ** 
+     * Create a snapshot for the given table.
+     * @param snapshot description of the snapshot to take
+     * 
+ */ public abstract void snapshot( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc getCompletedSnapshots(.ListSnapshotRequest) returns (.ListSnapshotResponse); + * + *
+     **
+     * List completed snapshots.
+     * @return a list of snapshot descriptors for completed snapshots
+     * 
+ */ public abstract void getCompletedSnapshots( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc deleteSnapshot(.DeleteSnapshotRequest) returns (.DeleteSnapshotResponse); + * + *
+     **
+     * Delete an existing snapshot. This method can also be used to clean up an aborted snapshot.
+     * @param snapshotName snapshot to delete
+     * 
+ */ public abstract void deleteSnapshot( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc isSnapshotDone(.IsSnapshotDoneRequest) returns (.IsSnapshotDoneResponse); + * + *
+     **
+     * Determine if the snapshot is done yet.
+     * 
+ */ public abstract void isSnapshotDone( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc restoreSnapshot(.RestoreSnapshotRequest) returns (.RestoreSnapshotResponse); + * + *
+     **
+     * Restore a snapshot
+     * @param snapshot description of the snapshot to restore
+     * 
+ */ public abstract void restoreSnapshot( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc isRestoreSnapshotDone(.IsRestoreSnapshotDoneRequest) returns (.IsRestoreSnapshotDoneResponse); + * + *
+     **
+     * Determine if the snapshot restore is done yet.
+     * 
+ */ public abstract void isRestoreSnapshotDone( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneRequest request, com.google.protobuf.RpcCallback done); - + public static final com.google.protobuf.Descriptors.ServiceDescriptor getDescriptor() { @@ -21231,7 +25160,7 @@ public final class MasterAdminProtos { getDescriptorForType() { return getDescriptor(); } - + public final void callMethod( com.google.protobuf.Descriptors.MethodDescriptor method, com.google.protobuf.RpcController controller, @@ -21383,7 +25312,7 @@ public final class MasterAdminProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getRequestPrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -21451,7 +25380,7 @@ public final class MasterAdminProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getResponsePrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -21519,23 +25448,23 @@ public final class MasterAdminProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public static Stub newStub( com.google.protobuf.RpcChannel channel) { return new Stub(channel); } - + public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MasterAdminService implements Interface { private Stub(com.google.protobuf.RpcChannel channel) { this.channel = channel; } - + private final com.google.protobuf.RpcChannel channel; - + public com.google.protobuf.RpcChannel getChannel() { return channel; } - + public void addColumn( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest request, @@ -21550,7 +25479,7 @@ public final class MasterAdminProtos { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse.getDefaultInstance())); } - + public void deleteColumn( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest request, @@ -21565,7 +25494,7 @@ public final class MasterAdminProtos { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse.getDefaultInstance())); } - + public void modifyColumn( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest request, @@ -21580,7 +25509,7 @@ public final class MasterAdminProtos { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse.getDefaultInstance())); } - + public void moveRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest request, @@ -21595,7 +25524,7 @@ public final class MasterAdminProtos { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse.getDefaultInstance())); } - + public void dispatchMergingRegions( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsRequest request, @@ -21610,7 +25539,7 @@ 
public final class MasterAdminProtos { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsResponse.getDefaultInstance())); } - + public void assignRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest request, @@ -21625,7 +25554,7 @@ public final class MasterAdminProtos { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse.getDefaultInstance())); } - + public void unassignRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest request, @@ -21640,7 +25569,7 @@ public final class MasterAdminProtos { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse.getDefaultInstance())); } - + public void offlineRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest request, @@ -21655,7 +25584,7 @@ public final class MasterAdminProtos { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse.getDefaultInstance())); } - + public void deleteTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest request, @@ -21670,7 +25599,7 @@ public final class MasterAdminProtos { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse.getDefaultInstance())); } - + public void enableTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest request, @@ -21685,7 +25614,7 @@ public final class MasterAdminProtos { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse.getDefaultInstance())); } - + public void disableTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest request, @@ -21700,7 +25629,7 @@ public final class MasterAdminProtos { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse.getDefaultInstance())); } - + public void modifyTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest request, @@ -21715,7 +25644,7 @@ public final class MasterAdminProtos { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse.getDefaultInstance())); } - + public void createTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest request, @@ -21730,7 +25659,7 @@ public final class MasterAdminProtos { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse.class, 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse.getDefaultInstance())); } - + public void shutdown( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest request, @@ -21745,7 +25674,7 @@ public final class MasterAdminProtos { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse.getDefaultInstance())); } - + public void stopMaster( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest request, @@ -21760,7 +25689,7 @@ public final class MasterAdminProtos { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse.getDefaultInstance())); } - + public void balance( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest request, @@ -21775,7 +25704,7 @@ public final class MasterAdminProtos { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse.getDefaultInstance())); } - + public void setBalancerRunning( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest request, @@ -21790,7 +25719,7 @@ public final class MasterAdminProtos { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.getDefaultInstance())); } - + public void runCatalogScan( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest request, @@ -21805,7 +25734,7 @@ public final class MasterAdminProtos { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.getDefaultInstance())); } - + public void enableCatalogJanitor( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest request, @@ -21820,7 +25749,7 @@ public final class MasterAdminProtos { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.getDefaultInstance())); } - + public void isCatalogJanitorEnabled( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest request, @@ -21835,7 +25764,7 @@ public final class MasterAdminProtos { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance())); } - + public void execMasterService( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request, @@ -21850,7 +25779,7 @@ public final class MasterAdminProtos { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.class, 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance())); } - + public void snapshot( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest request, @@ -21865,7 +25794,7 @@ public final class MasterAdminProtos { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse.getDefaultInstance())); } - + public void getCompletedSnapshots( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest request, @@ -21880,7 +25809,7 @@ public final class MasterAdminProtos { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse.getDefaultInstance())); } - + public void deleteSnapshot( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotRequest request, @@ -21895,7 +25824,7 @@ public final class MasterAdminProtos { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotResponse.getDefaultInstance())); } - + public void isSnapshotDone( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneRequest request, @@ -21910,7 +25839,7 @@ public final class MasterAdminProtos { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneResponse.getDefaultInstance())); } - + public void restoreSnapshot( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotRequest request, @@ -21925,7 +25854,7 @@ public final class MasterAdminProtos { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotResponse.getDefaultInstance())); } - + public void isRestoreSnapshotDone( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneRequest request, @@ -21941,156 +25870,156 @@ public final class MasterAdminProtos { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneResponse.getDefaultInstance())); } } - + public static BlockingInterface newBlockingStub( com.google.protobuf.BlockingRpcChannel channel) { return new BlockingStub(channel); } - + public interface BlockingInterface { public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse addColumn( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse deleteColumn( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse modifyColumn( com.google.protobuf.RpcController controller, 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse moveRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsResponse dispatchMergingRegions( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse assignRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse unassignRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse offlineRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse deleteTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse enableTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse disableTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse modifyTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse createTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse shutdown( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse stopMaster( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest request) 
throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse balance( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse setBalancerRunning( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse runCatalogScan( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse enableCatalogJanitor( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse isCatalogJanitorEnabled( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse execMasterService( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse snapshot( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse getCompletedSnapshots( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotResponse deleteSnapshot( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneResponse isSnapshotDone( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotResponse restoreSnapshot( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneResponse isRestoreSnapshotDone( com.google.protobuf.RpcController controller, 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneRequest request) throws com.google.protobuf.ServiceException; } - + private static final class BlockingStub implements BlockingInterface { private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { this.channel = channel; } - + private final com.google.protobuf.BlockingRpcChannel channel; - + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse addColumn( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest request) @@ -22101,8 +26030,8 @@ public final class MasterAdminProtos { request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse deleteColumn( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest request) @@ -22113,8 +26042,8 @@ public final class MasterAdminProtos { request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse modifyColumn( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest request) @@ -22125,8 +26054,8 @@ public final class MasterAdminProtos { request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse moveRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest request) @@ -22137,8 +26066,8 @@ public final class MasterAdminProtos { request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsResponse dispatchMergingRegions( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsRequest request) @@ -22149,8 +26078,8 @@ public final class MasterAdminProtos { request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse assignRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest request) @@ -22161,8 +26090,8 @@ public final class MasterAdminProtos { request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse unassignRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest request) @@ -22173,8 +26102,8 @@ public final class MasterAdminProtos { request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse offlineRegion( com.google.protobuf.RpcController controller, 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest request) @@ -22185,8 +26114,8 @@ public final class MasterAdminProtos { request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse deleteTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest request) @@ -22197,8 +26126,8 @@ public final class MasterAdminProtos { request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse enableTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest request) @@ -22209,8 +26138,8 @@ public final class MasterAdminProtos { request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse disableTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest request) @@ -22221,8 +26150,8 @@ public final class MasterAdminProtos { request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse modifyTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest request) @@ -22233,8 +26162,8 @@ public final class MasterAdminProtos { request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse createTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest request) @@ -22245,8 +26174,8 @@ public final class MasterAdminProtos { request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse shutdown( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest request) @@ -22257,8 +26186,8 @@ public final class MasterAdminProtos { request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse stopMaster( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest request) @@ -22269,8 +26198,8 @@ public final class MasterAdminProtos { request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse balance( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest request) @@ -22281,8 +26210,8 @@ public final class MasterAdminProtos { request, 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse setBalancerRunning( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest request) @@ -22293,8 +26222,8 @@ public final class MasterAdminProtos { request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse runCatalogScan( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest request) @@ -22305,8 +26234,8 @@ public final class MasterAdminProtos { request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse enableCatalogJanitor( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest request) @@ -22317,8 +26246,8 @@ public final class MasterAdminProtos { request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse isCatalogJanitorEnabled( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest request) @@ -22329,8 +26258,8 @@ public final class MasterAdminProtos { request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse execMasterService( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request) @@ -22341,8 +26270,8 @@ public final class MasterAdminProtos { request, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse snapshot( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest request) @@ -22353,8 +26282,8 @@ public final class MasterAdminProtos { request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse getCompletedSnapshots( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest request) @@ -22365,8 +26294,8 @@ public final class MasterAdminProtos { request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotResponse deleteSnapshot( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotRequest request) @@ -22377,8 +26306,8 @@ public final class MasterAdminProtos { request, 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneResponse isSnapshotDone( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneRequest request) @@ -22389,8 +26318,8 @@ public final class MasterAdminProtos { request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotResponse restoreSnapshot( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotRequest request) @@ -22401,8 +26330,8 @@ public final class MasterAdminProtos { request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneResponse isRestoreSnapshotDone( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneRequest request) @@ -22413,10 +26342,12 @@ public final class MasterAdminProtos { request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneResponse.getDefaultInstance()); } - + } + + // @@protoc_insertion_point(class_scope:MasterAdminService) } - + private static com.google.protobuf.Descriptors.Descriptor internal_static_AddColumnRequest_descriptor; private static @@ -22677,7 +26608,7 @@ public final class MasterAdminProtos { private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_IsRestoreSnapshotDoneResponse_fieldAccessorTable; - + public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; @@ -22805,417 +26736,313 @@ public final class MasterAdminProtos { internal_static_AddColumnRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_AddColumnRequest_descriptor, - new java.lang.String[] { "TableName", "ColumnFamilies", }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest.Builder.class); + new java.lang.String[] { "TableName", "ColumnFamilies", }); internal_static_AddColumnResponse_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_AddColumnResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_AddColumnResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse.Builder.class); + new java.lang.String[] { }); internal_static_DeleteColumnRequest_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_DeleteColumnRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_DeleteColumnRequest_descriptor, - new java.lang.String[] { "TableName", "ColumnName", }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest.Builder.class); + new java.lang.String[] { "TableName", "ColumnName", }); 
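// The accessor-table hunks in this static initializer all make the same mechanical
// change: the protobuf 2.4 codegen bound the generated message and builder classes
// into each FieldAccessorTable eagerly at class-load, while the 2.5 codegen builds
// the table from the descriptor and field names alone and binds the classes lazily
// through ensureFieldAccessorsInitialized() (see the internalGetFieldAccessorTable()
// hunks further down). Callers see no difference: descriptor-driven reflection still
// routes through the same table. A minimal sketch of that reflective path, using the
// DeleteColumnRequest message from the hunk above; the class name ReflectionCheck is
// illustrative only and not part of this patch.
import com.google.protobuf.ByteString;
import com.google.protobuf.Descriptors.FieldDescriptor;
import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest;

public class ReflectionCheck {
  public static void main(String[] args) {
    DeleteColumnRequest req = DeleteColumnRequest.newBuilder()
        .setTableName(ByteString.copyFromUtf8("t1"))
        .setColumnName(ByteString.copyFromUtf8("cf"))
        .build();
    // getField() resolves through the FieldAccessorTable; under 2.5 the generated
    // classes are attached on this first reflective use instead of at class-load time.
    FieldDescriptor fd = req.getDescriptorForType().findFieldByName("tableName");
    ByteString tableName = (ByteString) req.getField(fd);
    System.out.println(tableName.toStringUtf8()); // prints: t1
  }
}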
internal_static_DeleteColumnResponse_descriptor = getDescriptor().getMessageTypes().get(3); internal_static_DeleteColumnResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_DeleteColumnResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse.Builder.class); + new java.lang.String[] { }); internal_static_ModifyColumnRequest_descriptor = getDescriptor().getMessageTypes().get(4); internal_static_ModifyColumnRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ModifyColumnRequest_descriptor, - new java.lang.String[] { "TableName", "ColumnFamilies", }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest.Builder.class); + new java.lang.String[] { "TableName", "ColumnFamilies", }); internal_static_ModifyColumnResponse_descriptor = getDescriptor().getMessageTypes().get(5); internal_static_ModifyColumnResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ModifyColumnResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse.Builder.class); + new java.lang.String[] { }); internal_static_MoveRegionRequest_descriptor = getDescriptor().getMessageTypes().get(6); internal_static_MoveRegionRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_MoveRegionRequest_descriptor, - new java.lang.String[] { "Region", "DestServerName", }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest.Builder.class); + new java.lang.String[] { "Region", "DestServerName", }); internal_static_MoveRegionResponse_descriptor = getDescriptor().getMessageTypes().get(7); internal_static_MoveRegionResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_MoveRegionResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse.Builder.class); + new java.lang.String[] { }); internal_static_DispatchMergingRegionsRequest_descriptor = getDescriptor().getMessageTypes().get(8); internal_static_DispatchMergingRegionsRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_DispatchMergingRegionsRequest_descriptor, - new java.lang.String[] { "RegionA", "RegionB", "Forcible", }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsRequest.Builder.class); + new java.lang.String[] { "RegionA", "RegionB", "Forcible", }); internal_static_DispatchMergingRegionsResponse_descriptor = getDescriptor().getMessageTypes().get(9); internal_static_DispatchMergingRegionsResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( 
internal_static_DispatchMergingRegionsResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DispatchMergingRegionsResponse.Builder.class); + new java.lang.String[] { }); internal_static_AssignRegionRequest_descriptor = getDescriptor().getMessageTypes().get(10); internal_static_AssignRegionRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_AssignRegionRequest_descriptor, - new java.lang.String[] { "Region", }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest.Builder.class); + new java.lang.String[] { "Region", }); internal_static_AssignRegionResponse_descriptor = getDescriptor().getMessageTypes().get(11); internal_static_AssignRegionResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_AssignRegionResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse.Builder.class); + new java.lang.String[] { }); internal_static_UnassignRegionRequest_descriptor = getDescriptor().getMessageTypes().get(12); internal_static_UnassignRegionRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_UnassignRegionRequest_descriptor, - new java.lang.String[] { "Region", "Force", }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest.Builder.class); + new java.lang.String[] { "Region", "Force", }); internal_static_UnassignRegionResponse_descriptor = getDescriptor().getMessageTypes().get(13); internal_static_UnassignRegionResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_UnassignRegionResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse.Builder.class); + new java.lang.String[] { }); internal_static_OfflineRegionRequest_descriptor = getDescriptor().getMessageTypes().get(14); internal_static_OfflineRegionRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_OfflineRegionRequest_descriptor, - new java.lang.String[] { "Region", }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest.Builder.class); + new java.lang.String[] { "Region", }); internal_static_OfflineRegionResponse_descriptor = getDescriptor().getMessageTypes().get(15); internal_static_OfflineRegionResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_OfflineRegionResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse.Builder.class); + new java.lang.String[] { }); internal_static_CreateTableRequest_descriptor = 
getDescriptor().getMessageTypes().get(16); internal_static_CreateTableRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_CreateTableRequest_descriptor, - new java.lang.String[] { "TableSchema", "SplitKeys", }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest.Builder.class); + new java.lang.String[] { "TableSchema", "SplitKeys", }); internal_static_CreateTableResponse_descriptor = getDescriptor().getMessageTypes().get(17); internal_static_CreateTableResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_CreateTableResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse.Builder.class); + new java.lang.String[] { }); internal_static_DeleteTableRequest_descriptor = getDescriptor().getMessageTypes().get(18); internal_static_DeleteTableRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_DeleteTableRequest_descriptor, - new java.lang.String[] { "TableName", }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest.Builder.class); + new java.lang.String[] { "TableName", }); internal_static_DeleteTableResponse_descriptor = getDescriptor().getMessageTypes().get(19); internal_static_DeleteTableResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_DeleteTableResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse.Builder.class); + new java.lang.String[] { }); internal_static_EnableTableRequest_descriptor = getDescriptor().getMessageTypes().get(20); internal_static_EnableTableRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_EnableTableRequest_descriptor, - new java.lang.String[] { "TableName", }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest.Builder.class); + new java.lang.String[] { "TableName", }); internal_static_EnableTableResponse_descriptor = getDescriptor().getMessageTypes().get(21); internal_static_EnableTableResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_EnableTableResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse.Builder.class); + new java.lang.String[] { }); internal_static_DisableTableRequest_descriptor = getDescriptor().getMessageTypes().get(22); internal_static_DisableTableRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_DisableTableRequest_descriptor, - new java.lang.String[] { "TableName", }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest.class, - 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest.Builder.class); + new java.lang.String[] { "TableName", }); internal_static_DisableTableResponse_descriptor = getDescriptor().getMessageTypes().get(23); internal_static_DisableTableResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_DisableTableResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse.Builder.class); + new java.lang.String[] { }); internal_static_ModifyTableRequest_descriptor = getDescriptor().getMessageTypes().get(24); internal_static_ModifyTableRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ModifyTableRequest_descriptor, - new java.lang.String[] { "TableName", "TableSchema", }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest.Builder.class); + new java.lang.String[] { "TableName", "TableSchema", }); internal_static_ModifyTableResponse_descriptor = getDescriptor().getMessageTypes().get(25); internal_static_ModifyTableResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ModifyTableResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse.Builder.class); + new java.lang.String[] { }); internal_static_ShutdownRequest_descriptor = getDescriptor().getMessageTypes().get(26); internal_static_ShutdownRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ShutdownRequest_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest.Builder.class); + new java.lang.String[] { }); internal_static_ShutdownResponse_descriptor = getDescriptor().getMessageTypes().get(27); internal_static_ShutdownResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ShutdownResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse.Builder.class); + new java.lang.String[] { }); internal_static_StopMasterRequest_descriptor = getDescriptor().getMessageTypes().get(28); internal_static_StopMasterRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_StopMasterRequest_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest.Builder.class); + new java.lang.String[] { }); internal_static_StopMasterResponse_descriptor = getDescriptor().getMessageTypes().get(29); internal_static_StopMasterResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_StopMasterResponse_descriptor, - new java.lang.String[] { }, - 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse.Builder.class); + new java.lang.String[] { }); internal_static_BalanceRequest_descriptor = getDescriptor().getMessageTypes().get(30); internal_static_BalanceRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_BalanceRequest_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest.Builder.class); + new java.lang.String[] { }); internal_static_BalanceResponse_descriptor = getDescriptor().getMessageTypes().get(31); internal_static_BalanceResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_BalanceResponse_descriptor, - new java.lang.String[] { "BalancerRan", }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse.Builder.class); + new java.lang.String[] { "BalancerRan", }); internal_static_SetBalancerRunningRequest_descriptor = getDescriptor().getMessageTypes().get(32); internal_static_SetBalancerRunningRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_SetBalancerRunningRequest_descriptor, - new java.lang.String[] { "On", "Synchronous", }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest.Builder.class); + new java.lang.String[] { "On", "Synchronous", }); internal_static_SetBalancerRunningResponse_descriptor = getDescriptor().getMessageTypes().get(33); internal_static_SetBalancerRunningResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_SetBalancerRunningResponse_descriptor, - new java.lang.String[] { "PrevBalanceValue", }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.Builder.class); + new java.lang.String[] { "PrevBalanceValue", }); internal_static_CatalogScanRequest_descriptor = getDescriptor().getMessageTypes().get(34); internal_static_CatalogScanRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_CatalogScanRequest_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest.Builder.class); + new java.lang.String[] { }); internal_static_CatalogScanResponse_descriptor = getDescriptor().getMessageTypes().get(35); internal_static_CatalogScanResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_CatalogScanResponse_descriptor, - new java.lang.String[] { "ScanResult", }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.Builder.class); + new java.lang.String[] { "ScanResult", }); internal_static_EnableCatalogJanitorRequest_descriptor = getDescriptor().getMessageTypes().get(36); 
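// Besides the accessor tables, most of the churn in these generated files comes from
// protobuf 2.5 emitting a static PARSER for every message (shown in full for
// GetSchemaAlterStatusRequest further down), which replaces the 2.4-era
// newBuilder().mergeFrom(...).build() round-trip as the preferred way to decode
// bytes. A hedged sketch of the two idioms, using BalanceRequest from the hunk above
// as regenerated by this patch; the class name ParseIdioms is illustrative only.
import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest;

public class ParseIdioms {
  public static void main(String[] args) throws InvalidProtocolBufferException {
    byte[] bytes = BalanceRequest.getDefaultInstance().toByteArray();

    // 2.4-era idiom: allocate a builder, merge the bytes, build the message.
    BalanceRequest viaBuilder = BalanceRequest.newBuilder().mergeFrom(bytes).build();

    // 2.5 idiom used throughout this patch: parse directly, no builder allocation.
    BalanceRequest viaParser = BalanceRequest.PARSER.parseFrom(bytes);

    System.out.println(viaBuilder.equals(viaParser)); // prints: true
  }
}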
internal_static_EnableCatalogJanitorRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_EnableCatalogJanitorRequest_descriptor, - new java.lang.String[] { "Enable", }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest.Builder.class); + new java.lang.String[] { "Enable", }); internal_static_EnableCatalogJanitorResponse_descriptor = getDescriptor().getMessageTypes().get(37); internal_static_EnableCatalogJanitorResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_EnableCatalogJanitorResponse_descriptor, - new java.lang.String[] { "PrevValue", }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.Builder.class); + new java.lang.String[] { "PrevValue", }); internal_static_IsCatalogJanitorEnabledRequest_descriptor = getDescriptor().getMessageTypes().get(38); internal_static_IsCatalogJanitorEnabledRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_IsCatalogJanitorEnabledRequest_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest.Builder.class); + new java.lang.String[] { }); internal_static_IsCatalogJanitorEnabledResponse_descriptor = getDescriptor().getMessageTypes().get(39); internal_static_IsCatalogJanitorEnabledResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_IsCatalogJanitorEnabledResponse_descriptor, - new java.lang.String[] { "Value", }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse.Builder.class); + new java.lang.String[] { "Value", }); internal_static_TakeSnapshotRequest_descriptor = getDescriptor().getMessageTypes().get(40); internal_static_TakeSnapshotRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_TakeSnapshotRequest_descriptor, - new java.lang.String[] { "Snapshot", }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest.Builder.class); + new java.lang.String[] { "Snapshot", }); internal_static_TakeSnapshotResponse_descriptor = getDescriptor().getMessageTypes().get(41); internal_static_TakeSnapshotResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_TakeSnapshotResponse_descriptor, - new java.lang.String[] { "ExpectedTimeout", }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse.Builder.class); + new java.lang.String[] { "ExpectedTimeout", }); internal_static_ListSnapshotRequest_descriptor = getDescriptor().getMessageTypes().get(42); internal_static_ListSnapshotRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ListSnapshotRequest_descriptor, - new 
java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest.Builder.class); + new java.lang.String[] { }); internal_static_ListSnapshotResponse_descriptor = getDescriptor().getMessageTypes().get(43); internal_static_ListSnapshotResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ListSnapshotResponse_descriptor, - new java.lang.String[] { "Snapshots", }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse.Builder.class); + new java.lang.String[] { "Snapshots", }); internal_static_DeleteSnapshotRequest_descriptor = getDescriptor().getMessageTypes().get(44); internal_static_DeleteSnapshotRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_DeleteSnapshotRequest_descriptor, - new java.lang.String[] { "Snapshot", }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotRequest.Builder.class); + new java.lang.String[] { "Snapshot", }); internal_static_DeleteSnapshotResponse_descriptor = getDescriptor().getMessageTypes().get(45); internal_static_DeleteSnapshotResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_DeleteSnapshotResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotResponse.Builder.class); + new java.lang.String[] { }); internal_static_RestoreSnapshotRequest_descriptor = getDescriptor().getMessageTypes().get(46); internal_static_RestoreSnapshotRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_RestoreSnapshotRequest_descriptor, - new java.lang.String[] { "Snapshot", }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotRequest.Builder.class); + new java.lang.String[] { "Snapshot", }); internal_static_RestoreSnapshotResponse_descriptor = getDescriptor().getMessageTypes().get(47); internal_static_RestoreSnapshotResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_RestoreSnapshotResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RestoreSnapshotResponse.Builder.class); + new java.lang.String[] { }); internal_static_IsSnapshotDoneRequest_descriptor = getDescriptor().getMessageTypes().get(48); internal_static_IsSnapshotDoneRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_IsSnapshotDoneRequest_descriptor, - new java.lang.String[] { "Snapshot", }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneRequest.Builder.class); + new java.lang.String[] { "Snapshot", }); internal_static_IsSnapshotDoneResponse_descriptor = 
getDescriptor().getMessageTypes().get(49); internal_static_IsSnapshotDoneResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_IsSnapshotDoneResponse_descriptor, - new java.lang.String[] { "Done", "Snapshot", }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneResponse.Builder.class); + new java.lang.String[] { "Done", "Snapshot", }); internal_static_IsRestoreSnapshotDoneRequest_descriptor = getDescriptor().getMessageTypes().get(50); internal_static_IsRestoreSnapshotDoneRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_IsRestoreSnapshotDoneRequest_descriptor, - new java.lang.String[] { "Snapshot", }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneRequest.Builder.class); + new java.lang.String[] { "Snapshot", }); internal_static_IsRestoreSnapshotDoneResponse_descriptor = getDescriptor().getMessageTypes().get(51); internal_static_IsRestoreSnapshotDoneResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_IsRestoreSnapshotDoneResponse_descriptor, - new java.lang.String[] { "Done", }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneResponse.Builder.class); + new java.lang.String[] { "Done", }); return null; } }; @@ -23226,6 +27053,6 @@ public final class MasterAdminProtos { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor(), }, assigner); } - + // @@protoc_insertion_point(outer_class_scope) } diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterMonitorProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterMonitorProtos.java index 1f36d15..b89ddc5 100644 --- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterMonitorProtos.java +++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterMonitorProtos.java @@ -10,50 +10,129 @@ public final class MasterMonitorProtos { } public interface GetSchemaAlterStatusRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required bytes tableName = 1; + /** + * required bytes tableName = 1; + */ boolean hasTableName(); + /** + * required bytes tableName = 1; + */ com.google.protobuf.ByteString getTableName(); } + /** + * Protobuf type {@code GetSchemaAlterStatusRequest} + */ public static final class GetSchemaAlterStatusRequest extends com.google.protobuf.GeneratedMessage implements GetSchemaAlterStatusRequestOrBuilder { // Use GetSchemaAlterStatusRequest.newBuilder() to construct. 
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterMonitorProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterMonitorProtos.java
index 1f36d15..b89ddc5 100644
--- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterMonitorProtos.java
+++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterMonitorProtos.java
@@ -10,50 +10,129 @@ public final class MasterMonitorProtos {
   }
   public interface GetSchemaAlterStatusRequestOrBuilder
       extends com.google.protobuf.MessageOrBuilder {
-    
+
     // required bytes tableName = 1;
+    /**
+     * required bytes tableName = 1;
+     */
     boolean hasTableName();
+    /**
+     * required bytes tableName = 1;
+     */
     com.google.protobuf.ByteString getTableName();
   }
+  /**
+   * Protobuf type {@code GetSchemaAlterStatusRequest}
+   */
   public static final class GetSchemaAlterStatusRequest extends
       com.google.protobuf.GeneratedMessage
       implements GetSchemaAlterStatusRequestOrBuilder {
     // Use GetSchemaAlterStatusRequest.newBuilder() to construct.
-    private GetSchemaAlterStatusRequest(Builder builder) {
+    private GetSchemaAlterStatusRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
       super(builder);
+      this.unknownFields = builder.getUnknownFields();
     }
-    private GetSchemaAlterStatusRequest(boolean noInit) {}
-    
+    private GetSchemaAlterStatusRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
     private static final GetSchemaAlterStatusRequest defaultInstance;
     public static GetSchemaAlterStatusRequest getDefaultInstance() {
       return defaultInstance;
     }
-    
+
     public GetSchemaAlterStatusRequest getDefaultInstanceForType() {
       return defaultInstance;
     }
-    
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private GetSchemaAlterStatusRequest(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 10: {
+              bitField0_ |= 0x00000001;
+              tableName_ = input.readBytes();
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
       return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetSchemaAlterStatusRequest_descriptor;
     }
-    
+
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetSchemaAlterStatusRequest_fieldAccessorTable;
+      return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetSchemaAlterStatusRequest_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<GetSchemaAlterStatusRequest> PARSER =
+        new com.google.protobuf.AbstractParser<GetSchemaAlterStatusRequest>() {
+      public GetSchemaAlterStatusRequest parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new GetSchemaAlterStatusRequest(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<GetSchemaAlterStatusRequest> getParserForType() {
+      return PARSER;
     }
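getParserForType() is an override inherited from MessageLite, so code that only holds a Message can reparse bytes of the same type without naming the concrete class. A small sketch under that assumption (the helper class and method names are illustrative):

    import com.google.protobuf.InvalidProtocolBufferException;
    import com.google.protobuf.Message;

    public final class GenericReparse {
      // Parses 'wire' as the same message type as 'prototype'.
      public static Message reparse(Message prototype, byte[] wire)
          throws InvalidProtocolBufferException {
        return prototype.getParserForType().parseFrom(wire);
      }
    }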
-    
+
     private int bitField0_;
     // required bytes tableName = 1;
     public static final int TABLENAME_FIELD_NUMBER = 1;
     private com.google.protobuf.ByteString tableName_;
+    /**
+     * required bytes tableName = 1;
+     */
     public boolean hasTableName() {
       return ((bitField0_ & 0x00000001) == 0x00000001);
     }
+    /**
+     * required bytes tableName = 1;
+     */
     public com.google.protobuf.ByteString getTableName() {
       return tableName_;
     }
-    
+
     private void initFields() {
       tableName_ = com.google.protobuf.ByteString.EMPTY;
     }
@@ -61,7 +140,7 @@ public final class MasterMonitorProtos {
     public final boolean isInitialized() {
       byte isInitialized = memoizedIsInitialized;
       if (isInitialized != -1) return isInitialized == 1;
-      
+
       if (!hasTableName()) {
         memoizedIsInitialized = 0;
         return false;
@@ -69,7 +148,7 @@ public final class MasterMonitorProtos {
       memoizedIsInitialized = 1;
       return true;
     }
-    
+
     public void writeTo(com.google.protobuf.CodedOutputStream output)
                         throws java.io.IOException {
       getSerializedSize();
@@ -78,12 +157,12 @@ public final class MasterMonitorProtos {
       }
       getUnknownFields().writeTo(output);
     }
-    
+
     private int memoizedSerializedSize = -1;
     public int getSerializedSize() {
       int size = memoizedSerializedSize;
       if (size != -1) return size;
-      
+
       size = 0;
       if (((bitField0_ & 0x00000001) == 0x00000001)) {
         size += com.google.protobuf.CodedOutputStream
@@ -93,14 +172,14 @@ public final class MasterMonitorProtos {
       memoizedSerializedSize = size;
       return size;
     }
-    
+
     private static final long serialVersionUID = 0L;
     @java.lang.Override
     protected java.lang.Object writeReplace()
         throws java.io.ObjectStreamException {
       return super.writeReplace();
     }
-    
+
     @java.lang.Override
     public boolean equals(final java.lang.Object obj) {
       if (obj == this) {
@@ -110,7 +189,7 @@ public final class MasterMonitorProtos {
         return super.equals(obj);
       }
       org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest) obj;
-      
+
       boolean result = true;
       result = result && (hasTableName() == other.hasTableName());
       if (hasTableName()) {
@@ -121,9 +200,13 @@ public final class MasterMonitorProtos {
           getUnknownFields().equals(other.getUnknownFields());
       return result;
     }
-    
+
+    private int memoizedHashCode = 0;
     @java.lang.Override
     public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
       int hash = 41;
       hash = (19 * hash) + getDescriptorForType().hashCode();
       if (hasTableName()) {
@@ -131,89 +214,79 @@ public final class MasterMonitorProtos {
         hash = (53 * hash) + getTableName().hashCode();
       }
       hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
       return hash;
     }
-    
+
     public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest parseFrom(
         com.google.protobuf.ByteString data)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest parseFrom(
         com.google.protobuf.ByteString data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest parseFrom(byte[] data)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest parseFrom(
         byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
    }
-    
+
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
-    
+
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
+    /**
+     * Protobuf type {@code GetSchemaAlterStatusRequest}
+     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequestOrBuilder {
@@ -221,18 +294,21 @@ public final class MasterMonitorProtos {
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetSchemaAlterStatusRequest_descriptor;
      }
-      
+
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetSchemaAlterStatusRequest_fieldAccessorTable;
+        return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetSchemaAlterStatusRequest_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest.Builder.class);
      }
-      
+
      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
-      
-      private Builder(BuilderParent parent) {
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
@@ -243,27 +319,27 @@ public final class MasterMonitorProtos {
      private static Builder create() {
        return new Builder();
      }
-      
+
      public Builder clear() {
        super.clear();
        tableName_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
-      
+
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
-      
+
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest.getDescriptor();
+        return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetSchemaAlterStatusRequest_descriptor;
      }
-      
+
      public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest.getDefaultInstance();
      }
-      
+
      public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest result = buildPartial();
        if (!result.isInitialized()) {
@@ -271,17 +347,7 @@ public final class MasterMonitorProtos {
        }
        return result;
      }
-      
-      private org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest buildParsed()
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(
-            result).asInvalidProtocolBufferException();
-        }
-        return result;
-      }
-      
+
      public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest(this);
        int from_bitField0_ = bitField0_;
@@ -294,7 +360,7 @@ public final class MasterMonitorProtos {
        onBuilt();
        return result;
      }
-      
+
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest)other);
@@ -303,7 +369,7 @@ public final class MasterMonitorProtos {
          return this;
        }
      }
-      
+
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest.getDefaultInstance()) return this;
        if (other.hasTableName()) {
@@ -312,7 +378,7 @@ public final class MasterMonitorProtos {
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
-      
+
      public final boolean isInitialized() {
        if (!hasTableName()) {
@@ -320,49 +386,43 @@
        }
        return true;
      }
-      
+
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
-        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder(
-            this.getUnknownFields());
-        while (true) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              this.setUnknownFields(unknownFields.build());
-              onChanged();
-              return this;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                this.setUnknownFields(unknownFields.build());
-                onChanged();
-                return this;
-              }
-              break;
-            }
-            case 10: {
-              bitField0_ |= 0x00000001;
-              tableName_ = input.readBytes();
-              break;
-            }
+        org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
          }
        }
+        return this;
      }
-      
      private int bitField0_;
-      
+
      // required bytes tableName = 1;
      private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY;
+      /**
+       * required bytes tableName = 1;
+       */
      public boolean hasTableName() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
+      /**
+       * required bytes tableName = 1;
+       */
      public com.google.protobuf.ByteString getTableName() {
        return tableName_;
      }
+      /**
+       * required bytes tableName = 1;
+       */
      public Builder setTableName(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
@@ -372,84 +432,183 @@ public final class MasterMonitorProtos {
        onChanged();
        return this;
      }
+      /**
+       * required bytes tableName = 1;
+       */
      public Builder clearTableName() {
        bitField0_ = (bitField0_ & ~0x00000001);
        tableName_ = getDefaultInstance().getTableName();
        onChanged();
        return this;
      }
-      
+
      // @@protoc_insertion_point(builder_scope:GetSchemaAlterStatusRequest)
    }
-    
+
    static {
      defaultInstance = new GetSchemaAlterStatusRequest(true);
      defaultInstance.initFields();
    }
-    
+
    // @@protoc_insertion_point(class_scope:GetSchemaAlterStatusRequest)
  }
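GetSchemaAlterStatusRequest declares tableName as required, and the regenerated parse path keeps whatever was read before a failure: the constructor above attaches the partial message via setUnfinishedMessage, and AbstractParser does the same when a required field is missing. A hedged sketch of how a caller could inspect that state (the helper class and the diagnostic printout are illustrative):

    import com.google.protobuf.InvalidProtocolBufferException;
    import com.google.protobuf.MessageLite;
    import org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest;

    public final class UnfinishedSketch {
      public static void tryParse(byte[] maybeTruncated) {
        try {
          GetSchemaAlterStatusRequest.PARSER.parseFrom(maybeTruncated);
        } catch (InvalidProtocolBufferException e) {
          // New in protobuf 2.5: the partially parsed message is retained.
          MessageLite partial = e.getUnfinishedMessage(); // may be null
          if (partial != null) {
            System.err.println("partial parse, " + partial.getSerializedSize() + " bytes of fields read");
          }
        }
      }
    }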
-  
+
  public interface GetSchemaAlterStatusResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
-    
+
    // optional uint32 yetToUpdateRegions = 1;
+    /**
+     * optional uint32 yetToUpdateRegions = 1;
+     */
    boolean hasYetToUpdateRegions();
+    /**
+     * optional uint32 yetToUpdateRegions = 1;
+     */
    int getYetToUpdateRegions();
-    
+
    // optional uint32 totalRegions = 2;
+    /**
+     * optional uint32 totalRegions = 2;
+     */
    boolean hasTotalRegions();
+    /**
+     * optional uint32 totalRegions = 2;
+     */
    int getTotalRegions();
  }
+  /**
+   * Protobuf type {@code GetSchemaAlterStatusResponse}
+   */
  public static final class GetSchemaAlterStatusResponse extends
      com.google.protobuf.GeneratedMessage
      implements GetSchemaAlterStatusResponseOrBuilder {
    // Use GetSchemaAlterStatusResponse.newBuilder() to construct.
-    private GetSchemaAlterStatusResponse(Builder builder) {
+    private GetSchemaAlterStatusResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
+      this.unknownFields = builder.getUnknownFields();
    }
-    private GetSchemaAlterStatusResponse(boolean noInit) {}
-    
+    private GetSchemaAlterStatusResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
    private static final GetSchemaAlterStatusResponse defaultInstance;
    public static GetSchemaAlterStatusResponse getDefaultInstance() {
      return defaultInstance;
    }
-    
+
    public GetSchemaAlterStatusResponse getDefaultInstanceForType() {
      return defaultInstance;
    }
-    
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private GetSchemaAlterStatusResponse(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 8: {
+              bitField0_ |= 0x00000001;
+              yetToUpdateRegions_ = input.readUInt32();
+              break;
+            }
+            case 16: {
+              bitField0_ |= 0x00000002;
+              totalRegions_ = input.readUInt32();
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetSchemaAlterStatusResponse_descriptor;
    }
-    
+
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetSchemaAlterStatusResponse_fieldAccessorTable;
+      return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetSchemaAlterStatusResponse_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<GetSchemaAlterStatusResponse> PARSER =
+        new com.google.protobuf.AbstractParser<GetSchemaAlterStatusResponse>() {
+      public GetSchemaAlterStatusResponse parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new GetSchemaAlterStatusResponse(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<GetSchemaAlterStatusResponse> getParserForType() {
+      return PARSER;
    }
-    
+
    private int bitField0_;
    // optional uint32 yetToUpdateRegions = 1;
    public static final int YETTOUPDATEREGIONS_FIELD_NUMBER = 1;
    private int yetToUpdateRegions_;
+    /**
+     * optional uint32 yetToUpdateRegions = 1;
+     */
    public boolean hasYetToUpdateRegions() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
+    /**
+     * optional uint32 yetToUpdateRegions = 1;
+     */
    public int getYetToUpdateRegions() {
      return yetToUpdateRegions_;
    }
-    
+
    // optional uint32 totalRegions = 2;
    public static final int TOTALREGIONS_FIELD_NUMBER = 2;
    private int totalRegions_;
+    /**
+     * optional uint32 totalRegions = 2;
+     */
    public boolean hasTotalRegions() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
+    /**
+     * optional uint32 totalRegions = 2;
+     */
    public int getTotalRegions() {
      return totalRegions_;
    }
-    
+
    private void initFields() {
      yetToUpdateRegions_ = 0;
      totalRegions_ = 0;
@@ -458,11 +617,11 @@ public final class MasterMonitorProtos {
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
-      
+
      memoizedIsInitialized = 1;
      return true;
    }
-    
+
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
@@ -474,12 +633,12 @@ public final class MasterMonitorProtos {
      }
      getUnknownFields().writeTo(output);
    }
-    
+
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
-      
+
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
@@ -493,14 +652,14 @@ public final class MasterMonitorProtos {
      memoizedSerializedSize = size;
      return size;
    }
-    
+
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
-    
+
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
@@ -510,7 +669,7 @@ public final class MasterMonitorProtos {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse) obj;
-      
+
      boolean result = true;
      result = result && (hasYetToUpdateRegions() == other.hasYetToUpdateRegions());
      if (hasYetToUpdateRegions()) {
@@ -526,9 +685,13 @@ public final class MasterMonitorProtos {
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
-    
+
+    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasYetToUpdateRegions()) {
@@ -540,89 +703,79 @@ public final class MasterMonitorProtos {
        hash = (53 * hash) + getTotalRegions();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
      return hash;
    }
-    
+
    public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
    }
-    
+
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
-    
+
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
+    /**
+     * Protobuf type {@code GetSchemaAlterStatusResponse}
+     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponseOrBuilder {
@@ -630,18 +783,21 @@ public final class MasterMonitorProtos {
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetSchemaAlterStatusResponse_descriptor;
      }
-      
+
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetSchemaAlterStatusResponse_fieldAccessorTable;
+        return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetSchemaAlterStatusResponse_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse.Builder.class);
      }
-      
+
      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
-      
-      private Builder(BuilderParent parent) {
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
@@ -652,7 +808,7 @@ public final class MasterMonitorProtos {
      private static Builder create() {
        return new Builder();
      }
-      
+
      public Builder clear() {
        super.clear();
        yetToUpdateRegions_ = 0;
@@ -661,20 +817,20 @@ public final class MasterMonitorProtos {
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }
-      
+
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
-      
+
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse.getDescriptor();
+        return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetSchemaAlterStatusResponse_descriptor;
      }
-      
+
      public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse.getDefaultInstance();
      }
-      
+
      public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse result = buildPartial();
        if (!result.isInitialized()) {
@@ -682,17 +838,7 @@ public final class MasterMonitorProtos {
        }
        return result;
      }
-      
-      private org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse buildParsed()
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(
-            result).asInvalidProtocolBufferException();
-        }
-        return result;
-      }
-      
+
      public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse(this);
        int from_bitField0_ = bitField0_;
@@ -709,7 +855,7 @@ public final class MasterMonitorProtos {
        onBuilt();
        return result;
      }
-      
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse)other);
@@ -718,7 +864,7 @@ public final class MasterMonitorProtos {
          return this;
        }
      }
-      
+
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse.getDefaultInstance()) return this;
        if (other.hasYetToUpdateRegions()) {
@@ -730,153 +876,261 @@ public final class MasterMonitorProtos {
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
-      
+
      public final boolean isInitialized() {
        return true;
      }
-      
+
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
-        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder(
-            this.getUnknownFields());
-        while (true) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              this.setUnknownFields(unknownFields.build());
-              onChanged();
-              return this;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                this.setUnknownFields(unknownFields.build());
-                onChanged();
-                return this;
-              }
-              break;
-            }
-            case 8: {
-              bitField0_ |= 0x00000001;
-              yetToUpdateRegions_ = input.readUInt32();
-              break;
-            }
-            case 16: {
-              bitField0_ |= 0x00000002;
-              totalRegions_ = input.readUInt32();
-              break;
-            }
+        org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
          }
        }
+        return this;
      }
-      
      private int bitField0_;
-      
+
      // optional uint32 yetToUpdateRegions = 1;
      private int yetToUpdateRegions_ ;
+      /**
+       * optional uint32 yetToUpdateRegions = 1;
+       */
      public boolean hasYetToUpdateRegions() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
+      /**
+       * optional uint32 yetToUpdateRegions = 1;
+       */
      public int getYetToUpdateRegions() {
        return yetToUpdateRegions_;
      }
+      /**
+       * optional uint32 yetToUpdateRegions = 1;
+       */
      public Builder setYetToUpdateRegions(int value) {
        bitField0_ |= 0x00000001;
        yetToUpdateRegions_ = value;
        onChanged();
        return this;
      }
+      /**
+       * optional uint32 yetToUpdateRegions = 1;
+       */
      public Builder clearYetToUpdateRegions() {
        bitField0_ = (bitField0_ & ~0x00000001);
        yetToUpdateRegions_ = 0;
        onChanged();
        return this;
      }
-      
+
      // optional uint32 totalRegions = 2;
      private int totalRegions_ ;
+      /**
+       * optional uint32 totalRegions = 2;
+       */
      public boolean hasTotalRegions() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
+      /**
+       * optional uint32 totalRegions = 2;
+       */
      public int getTotalRegions() {
        return totalRegions_;
      }
+      /**
+       * optional uint32 totalRegions = 2;
+       */
      public Builder setTotalRegions(int value) {
        bitField0_ |= 0x00000002;
        totalRegions_ = value;
        onChanged();
        return this;
      }
+      /**
+       * optional uint32 totalRegions = 2;
+       */
      public Builder clearTotalRegions() {
        bitField0_ = (bitField0_ & ~0x00000002);
        totalRegions_ = 0;
        onChanged();
        return this;
      }
-      
+
      // @@protoc_insertion_point(builder_scope:GetSchemaAlterStatusResponse)
    }
-    
+
    static {
      defaultInstance = new GetSchemaAlterStatusResponse(true);
      defaultInstance.initFields();
    }
-    
+
    // @@protoc_insertion_point(class_scope:GetSchemaAlterStatusResponse)
  }
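Like every message regenerated here, GetSchemaAlterStatusResponse now captures unrecognized tags into an UnknownFieldSet during parsing and writes them back out on serialization, so data from a newer schema survives a pass through this code. A sketch of that round trip, with the input bytes assumed to come off the wire and the helper class name made up:

    import org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse;

    public final class UnknownFieldRoundTrip {
      public static byte[] echo(byte[] wire) throws Exception {
        GetSchemaAlterStatusResponse msg = GetSchemaAlterStatusResponse.PARSER.parseFrom(wire);
        // Tags this schema does not define were parked in unknownFields by the
        // parse constructor; toByteArray() re-emits them along with known fields.
        return msg.toByteArray();
      }
    }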
-  
+
  public interface GetTableDescriptorsRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
-    
+
    // repeated string tableNames = 1;
-    java.util.List<String> getTableNamesList();
+    /**
+     * repeated string tableNames = 1;
+     */
+    java.util.List<java.lang.String>
+        getTableNamesList();
+    /**
+     * repeated string tableNames = 1;
+     */
    int getTableNamesCount();
-    String getTableNames(int index);
+    /**
+     * repeated string tableNames = 1;
+     */
+    java.lang.String getTableNames(int index);
+    /**
+     * repeated string tableNames = 1;
+     */
+    com.google.protobuf.ByteString
+        getTableNamesBytes(int index);
  }
+  /**
+   * Protobuf type {@code GetTableDescriptorsRequest}
+   */
  public static final class GetTableDescriptorsRequest extends
      com.google.protobuf.GeneratedMessage
      implements GetTableDescriptorsRequestOrBuilder {
    // Use GetTableDescriptorsRequest.newBuilder() to construct.
-    private GetTableDescriptorsRequest(Builder builder) {
+    private GetTableDescriptorsRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
+      this.unknownFields = builder.getUnknownFields();
    }
-    private GetTableDescriptorsRequest(boolean noInit) {}
-    
+    private GetTableDescriptorsRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
    private static final GetTableDescriptorsRequest defaultInstance;
    public static GetTableDescriptorsRequest getDefaultInstance() {
      return defaultInstance;
    }
-    
+
    public GetTableDescriptorsRequest getDefaultInstanceForType() {
      return defaultInstance;
    }
-    
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private GetTableDescriptorsRequest(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 10: {
+              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+                tableNames_ = new com.google.protobuf.LazyStringArrayList();
+                mutable_bitField0_ |= 0x00000001;
+              }
+              tableNames_.add(input.readBytes());
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+          tableNames_ = new com.google.protobuf.UnmodifiableLazyStringList(tableNames_);
+        }
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetTableDescriptorsRequest_descriptor;
    }
-    
+
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetTableDescriptorsRequest_fieldAccessorTable;
+      return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetTableDescriptorsRequest_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest.Builder.class);
    }
-    
+
+    public static com.google.protobuf.Parser<GetTableDescriptorsRequest> PARSER =
+        new com.google.protobuf.AbstractParser<GetTableDescriptorsRequest>() {
+      public GetTableDescriptorsRequest parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new GetTableDescriptorsRequest(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<GetTableDescriptorsRequest> getParserForType() {
+      return PARSER;
+    }
+
    // repeated string tableNames = 1;
    public static final int TABLENAMES_FIELD_NUMBER = 1;
    private com.google.protobuf.LazyStringList tableNames_;
-    public java.util.List<String>
+    /**
+     * repeated string tableNames = 1;
+     */
+    public java.util.List<java.lang.String>
        getTableNamesList() {
      return tableNames_;
    }
+    /**
+     * repeated string tableNames = 1;
+     */
    public int getTableNamesCount() {
      return tableNames_.size();
    }
-    public String getTableNames(int index) {
+    /**
+     * repeated string tableNames = 1;
+     */
+    public java.lang.String getTableNames(int index) {
      return tableNames_.get(index);
    }
-    
+    /**
+     * repeated string tableNames = 1;
+     */
+    public com.google.protobuf.ByteString
+        getTableNamesBytes(int index) {
+      return tableNames_.getByteString(index);
+    }
+
    private void initFields() {
      tableNames_ = com.google.protobuf.LazyStringArrayList.EMPTY;
    }
@@ -884,11 +1138,11 @@ public final class MasterMonitorProtos {
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
-      
+
      memoizedIsInitialized = 1;
      return true;
    }
-    
+
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
@@ -897,12 +1151,12 @@ public final class MasterMonitorProtos {
      }
      getUnknownFields().writeTo(output);
    }
-    
+
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
-      
+
      size = 0;
      {
        int dataSize = 0;
@@ -917,14 +1171,14 @@ public final class MasterMonitorProtos {
      memoizedSerializedSize = size;
      return size;
    }
-    
+
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
-    
+
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
@@ -934,7 +1188,7 @@ public final class MasterMonitorProtos {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest) obj;
-      
+
      boolean result = true;
      result = result && getTableNamesList()
          .equals(other.getTableNamesList());
@@ -942,9 +1196,13 @@ public final class MasterMonitorProtos {
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
-    
+
+    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (getTableNamesCount() > 0) {
@@ -952,89 +1210,79 @@ public final class MasterMonitorProtos {
        hash = (53 * hash) + getTableNamesList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
      return hash;
    }
-    
+
    public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
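parseDelimitedFrom keeps its old contract (it returns null once the stream is exhausted) while now delegating to PARSER, so reading a sequence of length-delimited messages stays a simple loop. A sketch, assuming the stream carries zero or more delimited GetTableDescriptorsRequest messages; the reader class itself is illustrative:

    import java.io.IOException;
    import java.io.InputStream;
    import java.util.ArrayList;
    import java.util.List;
    import org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest;

    public final class DelimitedReader {
      public static List<GetTableDescriptorsRequest> readAll(InputStream in) throws IOException {
        List<GetTableDescriptorsRequest> out = new ArrayList<GetTableDescriptorsRequest>();
        GetTableDescriptorsRequest msg;
        // null signals end of stream rather than raising an exception.
        while ((msg = GetTableDescriptorsRequest.parseDelimitedFrom(in)) != null) {
          out.add(msg);
        }
        return out;
      }
    }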
    public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
    }
-    
+
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
-    
+
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
+    /**
+     * Protobuf type {@code GetTableDescriptorsRequest}
+     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequestOrBuilder {
@@ -1042,18 +1290,21 @@ public final class MasterMonitorProtos {
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetTableDescriptorsRequest_descriptor;
      }
-      
+
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetTableDescriptorsRequest_fieldAccessorTable;
+        return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetTableDescriptorsRequest_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest.Builder.class);
      }
-      
+
      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
-      
-      private Builder(BuilderParent parent) {
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
@@ -1064,27 +1315,27 @@ public final class MasterMonitorProtos {
      private static Builder create() {
        return new Builder();
      }
-      
+
      public Builder clear() {
        super.clear();
        tableNames_ = com.google.protobuf.LazyStringArrayList.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
-      
+
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
-      
+
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest.getDescriptor();
+        return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetTableDescriptorsRequest_descriptor;
      }
-      
+
      public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest.getDefaultInstance();
      }
-      
+
      public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest result = buildPartial();
        if (!result.isInitialized()) {
@@ -1092,17 +1343,7 @@ public final class MasterMonitorProtos {
        }
        return result;
      }
-      
-      private org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest buildParsed()
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(
-            result).asInvalidProtocolBufferException();
-        }
-        return result;
-      }
-      
+
      public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest(this);
        int from_bitField0_ = bitField0_;
@@ -1115,7 +1356,7 @@ public final class MasterMonitorProtos {
        onBuilt();
        return result;
      }
-      
+
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest)other);
@@ -1124,7 +1365,7 @@ public final class MasterMonitorProtos {
          return this;
        }
      }
-      
+
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest.getDefaultInstance()) return this;
        if (!other.tableNames_.isEmpty()) {
@@ -1140,45 +1381,30 @@ public final class MasterMonitorProtos {
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
-      
+
      public final boolean isInitialized() {
        return true;
      }
-      
+
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
-        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder(
-            this.getUnknownFields());
-        while (true) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              this.setUnknownFields(unknownFields.build());
-              onChanged();
-              return this;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                this.setUnknownFields(unknownFields.build());
-                onChanged();
-                return this;
-              }
-              break;
-            }
-            case 10: {
-              ensureTableNamesIsMutable();
-              tableNames_.add(input.readBytes());
-              break;
-            }
+        org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
          }
        }
+        return this;
      }
-      
      private int bitField0_;
-      
+
      // repeated string tableNames = 1;
      private com.google.protobuf.LazyStringList tableNames_ = com.google.protobuf.LazyStringArrayList.EMPTY;
      private void ensureTableNamesIsMutable() {
@@ -1187,18 +1413,37 @@ public final class MasterMonitorProtos {
        bitField0_ |= 0x00000001;
       }
      }
-      public java.util.List<String>
+      /**
+       * repeated string tableNames = 1;
+       */
+      public java.util.List<java.lang.String>
+          getTableNamesList() {
        return java.util.Collections.unmodifiableList(tableNames_);
      }
+      /**
+       * repeated string tableNames = 1;
+       */
      public int getTableNamesCount() {
        return tableNames_.size();
      }
-      public String getTableNames(int index) {
+      /**
+       * repeated string tableNames = 1;
+       */
+      public java.lang.String getTableNames(int index) {
        return tableNames_.get(index);
      }
+      /**
+       * repeated string tableNames = 1;
+       */
+      public com.google.protobuf.ByteString
+          getTableNamesBytes(int index) {
+        return tableNames_.getByteString(index);
+      }
+      /**
+       * repeated string tableNames = 1;
+       */
      public Builder setTableNames(
-          int index, String value) {
+          int index, java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
@@ -1207,7 +1452,11 @@ public final class MasterMonitorProtos {
        onChanged();
        return this;
      }
-      public Builder addTableNames(String value) {
+      /**
+       * repeated string tableNames = 1;
+       */
+      public Builder addTableNames(
+          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
@@ -1216,98 +1465,215 @@ public final class MasterMonitorProtos {
        onChanged();
        return this;
      }
+      /**
+       * repeated string tableNames = 1;
+       */
      public Builder addAllTableNames(
-          java.lang.Iterable<String> values) {
+          java.lang.Iterable<java.lang.String> values) {
        ensureTableNamesIsMutable();
        super.addAll(values, tableNames_);
        onChanged();
        return this;
      }
+      /**
+       * repeated string tableNames = 1;
+       */
      public Builder clearTableNames() {
        tableNames_ = com.google.protobuf.LazyStringArrayList.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
-      void addTableNames(com.google.protobuf.ByteString value) {
-        ensureTableNamesIsMutable();
+      /**
+       * repeated string tableNames = 1;
+       */
+      public Builder addTableNamesBytes(
+          com.google.protobuf.ByteString value) {
+        if (value == null) {
+    throw new NullPointerException();
+  }
+  ensureTableNamesIsMutable();
        tableNames_.add(value);
        onChanged();
+        return this;
      }
-      
+
      // @@protoc_insertion_point(builder_scope:GetTableDescriptorsRequest)
    }
-    
+
    static {
      defaultInstance = new GetTableDescriptorsRequest(true);
      defaultInstance.initFields();
    }
-    
+
    // @@protoc_insertion_point(class_scope:GetTableDescriptorsRequest)
  }
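The repeated string tableNames field now pairs each String accessor with a ByteString one (getTableNamesBytes, addTableNamesBytes), letting callers move raw UTF-8 around without re-encoding. A short sketch; the table names and the helper class are made up for illustration:

    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest;

    public final class TableNamesSketch {
      public static GetTableDescriptorsRequest build() {
        GetTableDescriptorsRequest req = GetTableDescriptorsRequest.newBuilder()
            .addTableNames("usertable")                         // String path
            .addTableNamesBytes(ByteString.copyFromUtf8("t2"))  // raw-bytes path
            .build();
        // Read back without forcing a UTF-8 decode:
        ByteString first = req.getTableNamesBytes(0);
        assert first.toStringUtf8().equals("usertable");
        return req;
      }
    }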
-  
+
  public interface GetTableDescriptorsResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
-    
+
    // repeated .TableSchema tableSchema = 1;
+    /**
+     * repeated .TableSchema tableSchema = 1;
+     */
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema>
        getTableSchemaList();
+    /**
+     * repeated .TableSchema tableSchema = 1;
+     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema(int index);
+    /**
+     * repeated .TableSchema tableSchema = 1;
+     */
    int getTableSchemaCount();
+    /**
+     * repeated .TableSchema tableSchema = 1;
+     */
    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>
        getTableSchemaOrBuilderList();
+    /**
+     * repeated .TableSchema tableSchema = 1;
+     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder(
        int index);
  }
+  /**
+   * Protobuf type {@code GetTableDescriptorsResponse}
+   */
  public static final class GetTableDescriptorsResponse extends
      com.google.protobuf.GeneratedMessage
      implements GetTableDescriptorsResponseOrBuilder {
    // Use GetTableDescriptorsResponse.newBuilder() to construct.
-    private GetTableDescriptorsResponse(Builder builder) {
+    private GetTableDescriptorsResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
+      this.unknownFields = builder.getUnknownFields();
    }
-    private GetTableDescriptorsResponse(boolean noInit) {}
-    
+    private GetTableDescriptorsResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
    private static final GetTableDescriptorsResponse defaultInstance;
    public static GetTableDescriptorsResponse getDefaultInstance() {
      return defaultInstance;
    }
-    
+
    public GetTableDescriptorsResponse getDefaultInstanceForType() {
      return defaultInstance;
    }
-    
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private GetTableDescriptorsResponse(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 10: {
+              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+                tableSchema_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema>();
+                mutable_bitField0_ |= 0x00000001;
+              }
+              tableSchema_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.PARSER, extensionRegistry));
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+          tableSchema_ = java.util.Collections.unmodifiableList(tableSchema_);
+        }
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetTableDescriptorsResponse_descriptor;
    }
-    
+
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetTableDescriptorsResponse_fieldAccessorTable;
+      return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetTableDescriptorsResponse_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<GetTableDescriptorsResponse> PARSER =
+        new com.google.protobuf.AbstractParser<GetTableDescriptorsResponse>() {
+      public GetTableDescriptorsResponse parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new GetTableDescriptorsResponse(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
com.google.protobuf.Parser getParserForType() { + return PARSER; } - + // repeated .TableSchema tableSchema = 1; public static final int TABLESCHEMA_FIELD_NUMBER = 1; private java.util.List tableSchema_; + /** + * repeated .TableSchema tableSchema = 1; + */ public java.util.List getTableSchemaList() { return tableSchema_; } + /** + * repeated .TableSchema tableSchema = 1; + */ public java.util.List getTableSchemaOrBuilderList() { return tableSchema_; } + /** + * repeated .TableSchema tableSchema = 1; + */ public int getTableSchemaCount() { return tableSchema_.size(); } + /** + * repeated .TableSchema tableSchema = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema(int index) { return tableSchema_.get(index); } + /** + * repeated .TableSchema tableSchema = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder( int index) { return tableSchema_.get(index); } - + private void initFields() { tableSchema_ = java.util.Collections.emptyList(); } @@ -1315,7 +1681,7 @@ public final class MasterMonitorProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + for (int i = 0; i < getTableSchemaCount(); i++) { if (!getTableSchema(i).isInitialized()) { memoizedIsInitialized = 0; @@ -1325,7 +1691,7 @@ public final class MasterMonitorProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -1334,12 +1700,12 @@ public final class MasterMonitorProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; for (int i = 0; i < tableSchema_.size(); i++) { size += com.google.protobuf.CodedOutputStream @@ -1349,14 +1715,14 @@ public final class MasterMonitorProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -1366,7 +1732,7 @@ public final class MasterMonitorProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse) obj; - + boolean result = true; result = result && getTableSchemaList() .equals(other.getTableSchemaList()); @@ -1374,9 +1740,13 @@ public final class MasterMonitorProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (getTableSchemaCount() > 0) { @@ -1384,89 +1754,79 @@ public final class MasterMonitorProtos { hash = (53 * hash) + getTableSchemaList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse parseFrom( com.google.protobuf.ByteString data) throws 
com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder 
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code GetTableDescriptorsResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponseOrBuilder { @@ -1474,18 +1834,21 @@ public final class MasterMonitorProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetTableDescriptorsResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetTableDescriptorsResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetTableDescriptorsResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -1497,7 +1860,7 @@ public final class MasterMonitorProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (tableSchemaBuilder_ == null) { @@ -1508,20 +1871,20 @@ public final class MasterMonitorProtos { } return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetTableDescriptorsResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse result = buildPartial(); if (!result.isInitialized()) { @@ -1529,17 +1892,7 @@ public final class MasterMonitorProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } 
- return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse(this); int from_bitField0_ = bitField0_; @@ -1555,7 +1908,7 @@ public final class MasterMonitorProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse)other); @@ -1564,7 +1917,7 @@ public final class MasterMonitorProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse.getDefaultInstance()) return this; if (tableSchemaBuilder_ == null) { @@ -1596,7 +1949,7 @@ public final class MasterMonitorProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { for (int i = 0; i < getTableSchemaCount(); i++) { if (!getTableSchema(i).isInitialized()) { @@ -1606,42 +1959,26 @@ public final class MasterMonitorProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addTableSchema(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // repeated .TableSchema tableSchema = 1; private java.util.List tableSchema_ = java.util.Collections.emptyList(); @@ -1651,10 +1988,13 @@ public final class MasterMonitorProtos { bitField0_ |= 0x00000001; } } - + private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> tableSchemaBuilder_; - + + /** + * repeated .TableSchema tableSchema = 1; + */ public java.util.List 
getTableSchemaList() { if (tableSchemaBuilder_ == null) { return java.util.Collections.unmodifiableList(tableSchema_); @@ -1662,6 +2002,9 @@ public final class MasterMonitorProtos { return tableSchemaBuilder_.getMessageList(); } } + /** + * repeated .TableSchema tableSchema = 1; + */ public int getTableSchemaCount() { if (tableSchemaBuilder_ == null) { return tableSchema_.size(); @@ -1669,6 +2012,9 @@ public final class MasterMonitorProtos { return tableSchemaBuilder_.getCount(); } } + /** + * repeated .TableSchema tableSchema = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema(int index) { if (tableSchemaBuilder_ == null) { return tableSchema_.get(index); @@ -1676,6 +2022,9 @@ public final class MasterMonitorProtos { return tableSchemaBuilder_.getMessage(index); } } + /** + * repeated .TableSchema tableSchema = 1; + */ public Builder setTableSchema( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) { if (tableSchemaBuilder_ == null) { @@ -1690,6 +2039,9 @@ public final class MasterMonitorProtos { } return this; } + /** + * repeated .TableSchema tableSchema = 1; + */ public Builder setTableSchema( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder builderForValue) { if (tableSchemaBuilder_ == null) { @@ -1701,6 +2053,9 @@ public final class MasterMonitorProtos { } return this; } + /** + * repeated .TableSchema tableSchema = 1; + */ public Builder addTableSchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) { if (tableSchemaBuilder_ == null) { if (value == null) { @@ -1714,6 +2069,9 @@ public final class MasterMonitorProtos { } return this; } + /** + * repeated .TableSchema tableSchema = 1; + */ public Builder addTableSchema( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) { if (tableSchemaBuilder_ == null) { @@ -1728,6 +2086,9 @@ public final class MasterMonitorProtos { } return this; } + /** + * repeated .TableSchema tableSchema = 1; + */ public Builder addTableSchema( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder builderForValue) { if (tableSchemaBuilder_ == null) { @@ -1739,6 +2100,9 @@ public final class MasterMonitorProtos { } return this; } + /** + * repeated .TableSchema tableSchema = 1; + */ public Builder addTableSchema( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder builderForValue) { if (tableSchemaBuilder_ == null) { @@ -1750,6 +2114,9 @@ public final class MasterMonitorProtos { } return this; } + /** + * repeated .TableSchema tableSchema = 1; + */ public Builder addAllTableSchema( java.lang.Iterable values) { if (tableSchemaBuilder_ == null) { @@ -1761,6 +2128,9 @@ public final class MasterMonitorProtos { } return this; } + /** + * repeated .TableSchema tableSchema = 1; + */ public Builder clearTableSchema() { if (tableSchemaBuilder_ == null) { tableSchema_ = java.util.Collections.emptyList(); @@ -1771,6 +2141,9 @@ public final class MasterMonitorProtos { } return this; } + /** + * repeated .TableSchema tableSchema = 1; + */ public Builder removeTableSchema(int index) { if (tableSchemaBuilder_ == null) { ensureTableSchemaIsMutable(); @@ -1781,10 +2154,16 @@ public final class MasterMonitorProtos { } return this; } + /** + * repeated .TableSchema tableSchema = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder getTableSchemaBuilder( int index) { return 
getTableSchemaFieldBuilder().getBuilder(index); } + /** + * repeated .TableSchema tableSchema = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder( int index) { if (tableSchemaBuilder_ == null) { @@ -1792,6 +2171,9 @@ public final class MasterMonitorProtos { return tableSchemaBuilder_.getMessageOrBuilder(index); } } + /** + * repeated .TableSchema tableSchema = 1; + */ public java.util.List getTableSchemaOrBuilderList() { if (tableSchemaBuilder_ != null) { @@ -1800,15 +2182,24 @@ public final class MasterMonitorProtos { return java.util.Collections.unmodifiableList(tableSchema_); } } + /** + * repeated .TableSchema tableSchema = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder addTableSchemaBuilder() { return getTableSchemaFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance()); } + /** + * repeated .TableSchema tableSchema = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder addTableSchemaBuilder( int index) { return getTableSchemaFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance()); } + /** + * repeated .TableSchema tableSchema = 1; + */ public java.util.List getTableSchemaBuilderList() { return getTableSchemaFieldBuilder().getBuilderList(); @@ -1827,84 +2218,145 @@ public final class MasterMonitorProtos { } return tableSchemaBuilder_; } - + // @@protoc_insertion_point(builder_scope:GetTableDescriptorsResponse) } - + static { defaultInstance = new GetTableDescriptorsResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:GetTableDescriptorsResponse) } - + public interface GetClusterStatusRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code GetClusterStatusRequest} + */ public static final class GetClusterStatusRequest extends com.google.protobuf.GeneratedMessage implements GetClusterStatusRequestOrBuilder { // Use GetClusterStatusRequest.newBuilder() to construct. 
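Every message in this regeneration follows the same protobuf-2.5 shape seen in GetTableDescriptorsResponse above: parsing moves out of Builder.mergeFrom into a message constructor driven by a static PARSER, unknown fields are retained on the message itself, the static parseFrom overloads delegate to PARSER, and hashCode is memoized. A sketch of the resulting call path, assuming some serialized bytes `data` (hypothetical input):

    static MasterMonitorProtos.GetTableDescriptorsResponse parse(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      // 2.5 idiom: the static PARSER replaces newBuilder().mergeFrom(data).buildParsed();
      // on malformed input the thrown InvalidProtocolBufferException now carries the
      // partially parsed message via getUnfinishedMessage().
      return MasterMonitorProtos.GetTableDescriptorsResponse.PARSER.parseFrom(data);
    }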
- private GetClusterStatusRequest(Builder builder) { + private GetClusterStatusRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private GetClusterStatusRequest(boolean noInit) {} - + private GetClusterStatusRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final GetClusterStatusRequest defaultInstance; public static GetClusterStatusRequest getDefaultInstance() { return defaultInstance; } - + public GetClusterStatusRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private GetClusterStatusRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetClusterStatusRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetClusterStatusRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetClusterStatusRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public GetClusterStatusRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetClusterStatusRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; 
public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -1914,101 +2366,95 @@ public final class MasterMonitorProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest) obj; - + boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static 
org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code GetClusterStatusRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequestOrBuilder { @@ -2016,18 +2462,21 @@ public final class MasterMonitorProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetClusterStatusRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetClusterStatusRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetClusterStatusRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -2038,25 +2487,25 @@ public final class MasterMonitorProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest.getDescriptor(); + return 
org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetClusterStatusRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest result = buildPartial(); if (!result.isInitialized()) { @@ -2064,23 +2513,13 @@ public final class MasterMonitorProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest(this); onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest)other); @@ -2089,106 +2528,189 @@ public final class MasterMonitorProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - - + // @@protoc_insertion_point(builder_scope:GetClusterStatusRequest) } - + static { defaultInstance = new GetClusterStatusRequest(true); defaultInstance.initFields(); } - + // 
@@protoc_insertion_point(class_scope:GetClusterStatusRequest) } - + public interface GetClusterStatusResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .ClusterStatus clusterStatus = 1; + /** + * required .ClusterStatus clusterStatus = 1; + */ boolean hasClusterStatus(); + /** + * required .ClusterStatus clusterStatus = 1; + */ org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus getClusterStatus(); + /** + * required .ClusterStatus clusterStatus = 1; + */ org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatusOrBuilder getClusterStatusOrBuilder(); } + /** + * Protobuf type {@code GetClusterStatusResponse} + */ public static final class GetClusterStatusResponse extends com.google.protobuf.GeneratedMessage implements GetClusterStatusResponseOrBuilder { // Use GetClusterStatusResponse.newBuilder() to construct. - private GetClusterStatusResponse(Builder builder) { + private GetClusterStatusResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private GetClusterStatusResponse(boolean noInit) {} - + private GetClusterStatusResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final GetClusterStatusResponse defaultInstance; public static GetClusterStatusResponse getDefaultInstance() { return defaultInstance; } - + public GetClusterStatusResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private GetClusterStatusResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = clusterStatus_.toBuilder(); + } + clusterStatus_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(clusterStatus_); + clusterStatus_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetClusterStatusResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - 
return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetClusterStatusResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetClusterStatusResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public GetClusterStatusResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetClusterStatusResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required .ClusterStatus clusterStatus = 1; public static final int CLUSTERSTATUS_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus clusterStatus_; + /** + * required .ClusterStatus clusterStatus = 1; + */ public boolean hasClusterStatus() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .ClusterStatus clusterStatus = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus getClusterStatus() { return clusterStatus_; } + /** + * required .ClusterStatus clusterStatus = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatusOrBuilder getClusterStatusOrBuilder() { return clusterStatus_; } - + private void initFields() { clusterStatus_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.getDefaultInstance(); } @@ -2196,7 +2718,7 @@ public final class MasterMonitorProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasClusterStatus()) { memoizedIsInitialized = 0; return false; @@ -2208,7 +2730,7 @@ public final class MasterMonitorProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -2217,12 +2739,12 @@ public final class MasterMonitorProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -2232,14 +2754,14 @@ public final class MasterMonitorProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -2249,7 +2771,7 @@ public final class MasterMonitorProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse) obj; - + boolean result = true; result = result && (hasClusterStatus() == other.hasClusterStatus()); if 
(hasClusterStatus()) { @@ -2260,9 +2782,13 @@ public final class MasterMonitorProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasClusterStatus()) { @@ -2270,89 +2796,79 @@ public final class MasterMonitorProtos { hash = (53 * hash) + getClusterStatus().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse parseFrom( com.google.protobuf.CodedInputStream 
input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code GetClusterStatusResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponseOrBuilder { @@ -2360,18 +2876,21 @@ public final class MasterMonitorProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetClusterStatusResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetClusterStatusResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetClusterStatusResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -2383,7 +2902,7 @@ public final class MasterMonitorProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (clusterStatusBuilder_ == null) { @@ -2394,20 +2913,20 @@ public final class MasterMonitorProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetClusterStatusResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse build() { 
org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse result = buildPartial(); if (!result.isInitialized()) { @@ -2415,17 +2934,7 @@ public final class MasterMonitorProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse(this); int from_bitField0_ = bitField0_; @@ -2442,7 +2951,7 @@ public final class MasterMonitorProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse)other); @@ -2451,7 +2960,7 @@ public final class MasterMonitorProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse.getDefaultInstance()) return this; if (other.hasClusterStatus()) { @@ -2460,7 +2969,7 @@ public final class MasterMonitorProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasClusterStatus()) { @@ -2472,52 +2981,39 @@ public final class MasterMonitorProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.newBuilder(); - if (hasClusterStatus()) { - subBuilder.mergeFrom(getClusterStatus()); - } - input.readMessage(subBuilder, extensionRegistry); - setClusterStatus(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } 
} + return this; } - private int bitField0_; - + // required .ClusterStatus clusterStatus = 1; private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus clusterStatus_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatusOrBuilder> clusterStatusBuilder_; + /** + * required .ClusterStatus clusterStatus = 1; + */ public boolean hasClusterStatus() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .ClusterStatus clusterStatus = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus getClusterStatus() { if (clusterStatusBuilder_ == null) { return clusterStatus_; @@ -2525,6 +3021,9 @@ public final class MasterMonitorProtos { return clusterStatusBuilder_.getMessage(); } } + /** + * required .ClusterStatus clusterStatus = 1; + */ public Builder setClusterStatus(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus value) { if (clusterStatusBuilder_ == null) { if (value == null) { @@ -2538,6 +3037,9 @@ public final class MasterMonitorProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .ClusterStatus clusterStatus = 1; + */ public Builder setClusterStatus( org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.Builder builderForValue) { if (clusterStatusBuilder_ == null) { @@ -2549,6 +3051,9 @@ public final class MasterMonitorProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .ClusterStatus clusterStatus = 1; + */ public Builder mergeClusterStatus(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus value) { if (clusterStatusBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -2565,6 +3070,9 @@ public final class MasterMonitorProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .ClusterStatus clusterStatus = 1; + */ public Builder clearClusterStatus() { if (clusterStatusBuilder_ == null) { clusterStatus_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.getDefaultInstance(); @@ -2575,11 +3083,17 @@ public final class MasterMonitorProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * required .ClusterStatus clusterStatus = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.Builder getClusterStatusBuilder() { bitField0_ |= 0x00000001; onChanged(); return getClusterStatusFieldBuilder().getBuilder(); } + /** + * required .ClusterStatus clusterStatus = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatusOrBuilder getClusterStatusOrBuilder() { if (clusterStatusBuilder_ != null) { return clusterStatusBuilder_.getMessageOrBuilder(); @@ -2587,6 +3101,9 @@ public final class MasterMonitorProtos { return clusterStatus_; } } + /** + * required .ClusterStatus clusterStatus = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatusOrBuilder> getClusterStatusFieldBuilder() { @@ -2600,40 +3117,64 @@ 
public final class MasterMonitorProtos { } return clusterStatusBuilder_; } - + // @@protoc_insertion_point(builder_scope:GetClusterStatusResponse) } - + static { defaultInstance = new GetClusterStatusResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:GetClusterStatusResponse) } - + + /** + * Protobuf service {@code MasterMonitorService} + */ public static abstract class MasterMonitorService implements com.google.protobuf.Service { protected MasterMonitorService() {} - + public interface Interface { + /** + * rpc getSchemaAlterStatus(.GetSchemaAlterStatusRequest) returns (.GetSchemaAlterStatusResponse); + * + *
+       * <pre>
+       ** Used by the client to get the number of regions that have received the updated schema
+       * </pre>
+ */ public abstract void getSchemaAlterStatus( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc getTableDescriptors(.GetTableDescriptorsRequest) returns (.GetTableDescriptorsResponse); + * + *
+       * <pre>
+       ** Get list of TableDescriptors for requested tables.
+       * </pre>
+ */ public abstract void getTableDescriptors( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc getClusterStatus(.GetClusterStatusRequest) returns (.GetClusterStatusResponse); + * + *
+       * <pre>
+       ** Return cluster status.
+       * </pre>
+ */ public abstract void getClusterStatus( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest request, com.google.protobuf.RpcCallback done); - + } - + public static com.google.protobuf.Service newReflectiveService( final Interface impl) { return new MasterMonitorService() { @@ -2644,7 +3185,7 @@ public final class MasterMonitorProtos { com.google.protobuf.RpcCallback done) { impl.getSchemaAlterStatus(controller, request, done); } - + @java.lang.Override public void getTableDescriptors( com.google.protobuf.RpcController controller, @@ -2652,7 +3193,7 @@ public final class MasterMonitorProtos { com.google.protobuf.RpcCallback done) { impl.getTableDescriptors(controller, request, done); } - + @java.lang.Override public void getClusterStatus( com.google.protobuf.RpcController controller, @@ -2660,10 +3201,10 @@ public final class MasterMonitorProtos { com.google.protobuf.RpcCallback done) { impl.getClusterStatus(controller, request, done); } - + }; } - + public static com.google.protobuf.BlockingService newReflectiveBlockingService(final BlockingInterface impl) { return new com.google.protobuf.BlockingService() { @@ -2671,7 +3212,7 @@ public final class MasterMonitorProtos { getDescriptorForType() { return getDescriptor(); } - + public final com.google.protobuf.Message callBlockingMethod( com.google.protobuf.Descriptors.MethodDescriptor method, com.google.protobuf.RpcController controller, @@ -2693,7 +3234,7 @@ public final class MasterMonitorProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getRequestPrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -2713,7 +3254,7 @@ public final class MasterMonitorProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getResponsePrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -2733,25 +3274,46 @@ public final class MasterMonitorProtos { throw new java.lang.AssertionError("Can't get here."); } } - + }; } - + + /** + * rpc getSchemaAlterStatus(.GetSchemaAlterStatusRequest) returns (.GetSchemaAlterStatusResponse); + * + *
+     ** Used by the client to get the number of regions that have received the updated schema 
+     * </pre>
+ */ public abstract void getSchemaAlterStatus( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc getTableDescriptors(.GetTableDescriptorsRequest) returns (.GetTableDescriptorsResponse); + * + *
+     ** Get list of TableDescriptors for requested tables. 
+     * </pre>
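A minimal sketch (editorial, not part of the patch) of driving this service asynchronously: newStub, defined further below, turns each abstract method into a channel.callMethod call, and completion arrives through the RpcCallback. The RpcChannel and the table name "usertable" are assumptions supplied by the hosting application.

import com.google.protobuf.RpcCallback;
import com.google.protobuf.RpcChannel;
import org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.*;

class AsyncDescriptorClientSketch {
  static void fetchDescriptors(RpcChannel channel) {
    MasterMonitorService stub = MasterMonitorService.newStub(channel);
    GetTableDescriptorsRequest request = GetTableDescriptorsRequest.newBuilder()
        .addTableNames("usertable")  // hypothetical table
        .build();
    stub.getTableDescriptors(null, request,
        new RpcCallback<GetTableDescriptorsResponse>() {
          public void run(GetTableDescriptorsResponse response) {
            // Runs asynchronously once the RPC completes.
            System.out.println(response.getTableSchemaCount() + " schema(s) returned");
          }
        });
  }
}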
+ */ public abstract void getTableDescriptors( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc getClusterStatus(.GetClusterStatusRequest) returns (.GetClusterStatusResponse); + * + *
+     ** Return cluster status. 
+     * </pre>
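A minimal sketch (editorial, not part of the patch) of the blocking flavour of the same client: newBlockingStub wraps a BlockingRpcChannel, assumed to be supplied by the RPC engine, and each call returns its response directly or throws ServiceException.

import com.google.protobuf.BlockingRpcChannel;
import com.google.protobuf.ServiceException;
import org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.*;

class BlockingStatusClientSketch {
  static GetClusterStatusResponse fetchClusterStatus(BlockingRpcChannel channel)
      throws ServiceException {
    MasterMonitorService.BlockingInterface stub =
        MasterMonitorService.newBlockingStub(channel);
    return stub.getClusterStatus(null, GetClusterStatusRequest.getDefaultInstance());
  }
}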
+ */ public abstract void getClusterStatus( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest request, com.google.protobuf.RpcCallback done); - + public static final com.google.protobuf.Descriptors.ServiceDescriptor getDescriptor() { @@ -2761,7 +3323,7 @@ public final class MasterMonitorProtos { getDescriptorForType() { return getDescriptor(); } - + public final void callMethod( com.google.protobuf.Descriptors.MethodDescriptor method, com.google.protobuf.RpcController controller, @@ -2793,7 +3355,7 @@ public final class MasterMonitorProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getRequestPrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -2813,7 +3375,7 @@ public final class MasterMonitorProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getResponsePrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -2833,23 +3395,23 @@ public final class MasterMonitorProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public static Stub newStub( com.google.protobuf.RpcChannel channel) { return new Stub(channel); } - + public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.MasterMonitorService implements Interface { private Stub(com.google.protobuf.RpcChannel channel) { this.channel = channel; } - + private final com.google.protobuf.RpcChannel channel; - + public com.google.protobuf.RpcChannel getChannel() { return channel; } - + public void getSchemaAlterStatus( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest request, @@ -2864,7 +3426,7 @@ public final class MasterMonitorProtos { org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse.getDefaultInstance())); } - + public void getTableDescriptors( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest request, @@ -2879,7 +3441,7 @@ public final class MasterMonitorProtos { org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse.getDefaultInstance())); } - + public void getClusterStatus( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest request, @@ -2895,36 +3457,36 @@ public final class MasterMonitorProtos { org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse.getDefaultInstance())); } } - + public static BlockingInterface newBlockingStub( com.google.protobuf.BlockingRpcChannel channel) { return new BlockingStub(channel); } - + public interface BlockingInterface { public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse getSchemaAlterStatus( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse getTableDescriptors( 
com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse getClusterStatus( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest request) throws com.google.protobuf.ServiceException; } - + private static final class BlockingStub implements BlockingInterface { private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { this.channel = channel; } - + private final com.google.protobuf.BlockingRpcChannel channel; - + public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse getSchemaAlterStatus( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest request) @@ -2935,8 +3497,8 @@ public final class MasterMonitorProtos { request, org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse getTableDescriptors( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest request) @@ -2947,8 +3509,8 @@ public final class MasterMonitorProtos { request, org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse getClusterStatus( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest request) @@ -2959,10 +3521,12 @@ public final class MasterMonitorProtos { request, org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse.getDefaultInstance()); } - + } + + // @@protoc_insertion_point(class_scope:MasterMonitorService) } - + private static com.google.protobuf.Descriptors.Descriptor internal_static_GetSchemaAlterStatusRequest_descriptor; private static @@ -2993,7 +3557,7 @@ public final class MasterMonitorProtos { private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_GetClusterStatusResponse_fieldAccessorTable; - + public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; @@ -3032,49 +3596,37 @@ public final class MasterMonitorProtos { internal_static_GetSchemaAlterStatusRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_GetSchemaAlterStatusRequest_descriptor, - new java.lang.String[] { "TableName", }, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest.Builder.class); + new java.lang.String[] { "TableName", }); internal_static_GetSchemaAlterStatusResponse_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_GetSchemaAlterStatusResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_GetSchemaAlterStatusResponse_descriptor, - new java.lang.String[] { "YetToUpdateRegions", "TotalRegions", }, - 
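A minimal sketch (editorial, not part of the patch): the hunk above drops the message and builder classes from the static FieldAccessorTable constructor; under protobuf 2.5 they are bound lazily via ensureFieldAccessorsInitialized on first reflective use, as here. The proto field name yetToUpdateRegions is an assumption inferred from the accessor table's "YetToUpdateRegions".

import com.google.protobuf.Descriptors;
import org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse;

class LazyAccessorSketch {
  static Object readReflectively(GetSchemaAlterStatusResponse msg) {
    Descriptors.FieldDescriptor field = GetSchemaAlterStatusResponse.getDescriptor()
        .findFieldByName("yetToUpdateRegions");  // assumed proto field name
    return msg.getField(field);  // first reflective use triggers the lazy binding
  }
}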
org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse.Builder.class); + new java.lang.String[] { "YetToUpdateRegions", "TotalRegions", }); internal_static_GetTableDescriptorsRequest_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_GetTableDescriptorsRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_GetTableDescriptorsRequest_descriptor, - new java.lang.String[] { "TableNames", }, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest.Builder.class); + new java.lang.String[] { "TableNames", }); internal_static_GetTableDescriptorsResponse_descriptor = getDescriptor().getMessageTypes().get(3); internal_static_GetTableDescriptorsResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_GetTableDescriptorsResponse_descriptor, - new java.lang.String[] { "TableSchema", }, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse.Builder.class); + new java.lang.String[] { "TableSchema", }); internal_static_GetClusterStatusRequest_descriptor = getDescriptor().getMessageTypes().get(4); internal_static_GetClusterStatusRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_GetClusterStatusRequest_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest.Builder.class); + new java.lang.String[] { }); internal_static_GetClusterStatusResponse_descriptor = getDescriptor().getMessageTypes().get(5); internal_static_GetClusterStatusResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_GetClusterStatusResponse_descriptor, - new java.lang.String[] { "ClusterStatus", }, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse.Builder.class); + new java.lang.String[] { "ClusterStatus", }); return null; } }; @@ -3085,6 +3637,6 @@ public final class MasterMonitorProtos { org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.getDescriptor(), }, assigner); } - + // @@protoc_insertion_point(outer_class_scope) } diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java index b8c322f..f2f84cb 100644 --- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java +++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java @@ -11,69 +11,130 @@ public final class MasterProtos { public interface IsMasterRunningRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code IsMasterRunningRequest} + */ public static final class IsMasterRunningRequest extends com.google.protobuf.GeneratedMessage implements IsMasterRunningRequestOrBuilder { // Use 
IsMasterRunningRequest.newBuilder() to construct. - private IsMasterRunningRequest(Builder builder) { + private IsMasterRunningRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private IsMasterRunningRequest(boolean noInit) {} - + private IsMasterRunningRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final IsMasterRunningRequest defaultInstance; public static IsMasterRunningRequest getDefaultInstance() { return defaultInstance; } - + public IsMasterRunningRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private IsMasterRunningRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.Builder.class); } - + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public IsMasterRunningRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new IsMasterRunningRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; 
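A minimal sketch (editorial, not part of the patch): the PARSER field introduced above is the heart of this regeneration. Decoding now goes through protobuf 2.5's Parser instead of newBuilder().mergeFrom(...).buildParsed(), so no Builder is materialised on the decode path.

import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest;

class ParserRoundTripSketch {
  static IsMasterRunningRequest roundTrip() throws InvalidProtocolBufferException {
    byte[] wire = IsMasterRunningRequest.getDefaultInstance().toByteArray();
    // Equivalent to the removed newBuilder().mergeFrom(wire).buildParsed() path.
    return IsMasterRunningRequest.PARSER.parseFrom(wire);
  }
}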
public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -83,101 +144,95 @@ public final class MasterProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest) obj; - + boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseDelimitedFrom( 
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code IsMasterRunningRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequestOrBuilder { @@ -185,18 +240,21 @@ public final class MasterProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -207,25 +265,25 @@ public final class MasterProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest getDefaultInstanceForType() { return 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest result = buildPartial(); if (!result.isInitialized()) { @@ -233,23 +291,13 @@ public final class MasterProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest(this); onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest)other); @@ -258,102 +306,171 @@ public final class MasterProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - - + // @@protoc_insertion_point(builder_scope:IsMasterRunningRequest) } - + static { defaultInstance = new IsMasterRunningRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:IsMasterRunningRequest) } - + public interface IsMasterRunningResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required bool isMasterRunning = 1; + /** + * required bool isMasterRunning = 1; + */ boolean hasIsMasterRunning(); + /** + * required bool isMasterRunning = 1; + */ boolean getIsMasterRunning(); } + /** + * Protobuf type {@code 
IsMasterRunningResponse} + */ public static final class IsMasterRunningResponse extends com.google.protobuf.GeneratedMessage implements IsMasterRunningResponseOrBuilder { // Use IsMasterRunningResponse.newBuilder() to construct. - private IsMasterRunningResponse(Builder builder) { + private IsMasterRunningResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private IsMasterRunningResponse(boolean noInit) {} - + private IsMasterRunningResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final IsMasterRunningResponse defaultInstance; public static IsMasterRunningResponse getDefaultInstance() { return defaultInstance; } - + public IsMasterRunningResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private IsMasterRunningResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + isMasterRunning_ = input.readBool(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public IsMasterRunningResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new IsMasterRunningResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required bool isMasterRunning = 1; public static final int ISMASTERRUNNING_FIELD_NUMBER = 1; private boolean 
isMasterRunning_; + /** + * required bool isMasterRunning = 1; + */ public boolean hasIsMasterRunning() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bool isMasterRunning = 1; + */ public boolean getIsMasterRunning() { return isMasterRunning_; } - + private void initFields() { isMasterRunning_ = false; } @@ -361,7 +478,7 @@ public final class MasterProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasIsMasterRunning()) { memoizedIsInitialized = 0; return false; @@ -369,7 +486,7 @@ public final class MasterProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -378,12 +495,12 @@ public final class MasterProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -393,14 +510,14 @@ public final class MasterProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -410,7 +527,7 @@ public final class MasterProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse) obj; - + boolean result = true; result = result && (hasIsMasterRunning() == other.hasIsMasterRunning()); if (hasIsMasterRunning()) { @@ -421,9 +538,13 @@ public final class MasterProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasIsMasterRunning()) { @@ -431,89 +552,79 @@ public final class MasterProtos { hash = (53 * hash) + hashBoolean(getIsMasterRunning()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse 
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code IsMasterRunningResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponseOrBuilder { @@ -521,18 +632,21 @@ public final class MasterProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningResponse_fieldAccessorTable; + return 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -543,27 +657,27 @@ public final class MasterProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); isMasterRunning_ = false; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse result = buildPartial(); if (!result.isInitialized()) { @@ -571,17 +685,7 @@ public final class MasterProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse(this); int from_bitField0_ = bitField0_; @@ -594,7 +698,7 @@ public final class MasterProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse)other); @@ -603,7 +707,7 @@ public final class MasterProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.getDefaultInstance()) return this; if (other.hasIsMasterRunning()) { @@ -612,7 +716,7 @@ public final class MasterProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasIsMasterRunning()) { @@ -620,85 +724,92 @@ public final class MasterProtos { } 
return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - isMasterRunning_ = input.readBool(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required bool isMasterRunning = 1; private boolean isMasterRunning_ ; + /** + * required bool isMasterRunning = 1; + */ public boolean hasIsMasterRunning() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bool isMasterRunning = 1; + */ public boolean getIsMasterRunning() { return isMasterRunning_; } + /** + * required bool isMasterRunning = 1; + */ public Builder setIsMasterRunning(boolean value) { bitField0_ |= 0x00000001; isMasterRunning_ = value; onChanged(); return this; } + /** + * required bool isMasterRunning = 1; + */ public Builder clearIsMasterRunning() { bitField0_ = (bitField0_ & ~0x00000001); isMasterRunning_ = false; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:IsMasterRunningResponse) } - + static { defaultInstance = new IsMasterRunningResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:IsMasterRunningResponse) } - + + /** + * Protobuf service {@code MasterService} + */ public static abstract class MasterService implements com.google.protobuf.Service { protected MasterService() {} - + public interface Interface { + /** + * rpc isMasterRunning(.IsMasterRunningRequest) returns (.IsMasterRunningResponse); + * + *
+       ** return true if master is available 
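A minimal sketch (editorial, not part of the patch) of the unknownFields plumbing this regeneration threads through every message: tags a message does not declare are preserved across a parse and re-serialise round trip. Here the bool written by IsMasterRunningResponse is opaque to the field-less IsMasterRunningRequest, yet survives intact.

import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.*;

class UnknownFieldsSketch {
  static byte[] passThrough() throws InvalidProtocolBufferException {
    byte[] wire = IsMasterRunningResponse.newBuilder()
        .setIsMasterRunning(true).build().toByteArray();
    IsMasterRunningRequest opaque = IsMasterRunningRequest.PARSER.parseFrom(wire);
    return opaque.toByteArray();  // re-emits the unrecognised field bytes intact
  }
}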
+       * </pre>
+ */ public abstract void isMasterRunning( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest request, com.google.protobuf.RpcCallback done); - + } - + public static com.google.protobuf.Service newReflectiveService( final Interface impl) { return new MasterService() { @@ -709,10 +820,10 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done) { impl.isMasterRunning(controller, request, done); } - + }; } - + public static com.google.protobuf.BlockingService newReflectiveBlockingService(final BlockingInterface impl) { return new com.google.protobuf.BlockingService() { @@ -720,7 +831,7 @@ public final class MasterProtos { getDescriptorForType() { return getDescriptor(); } - + public final com.google.protobuf.Message callBlockingMethod( com.google.protobuf.Descriptors.MethodDescriptor method, com.google.protobuf.RpcController controller, @@ -738,7 +849,7 @@ public final class MasterProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getRequestPrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -754,7 +865,7 @@ public final class MasterProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getResponsePrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -770,15 +881,22 @@ public final class MasterProtos { throw new java.lang.AssertionError("Can't get here."); } } - + }; } - + + /** + * rpc isMasterRunning(.IsMasterRunningRequest) returns (.IsMasterRunningResponse); + * + *
+     ** return true if master is available 
+     * </pre>
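A minimal sketch (editorial, not part of the patch): the IsMasterRunningResponse returned by this rpc declares isMasterRunning as a required field, so its Builder refuses to build until the bit is set; buildPartial() is the escape hatch that skips validation.

import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse;

class RequiredFieldSketch {
  static IsMasterRunningResponse answer(boolean running) {
    IsMasterRunningResponse.Builder builder = IsMasterRunningResponse.newBuilder();
    // builder.build() here would throw UninitializedMessageException:
    // the required isMasterRunning bit is still clear.
    return builder.setIsMasterRunning(running).build();
  }
}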
+ */ public abstract void isMasterRunning( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest request, com.google.protobuf.RpcCallback done); - + public static final com.google.protobuf.Descriptors.ServiceDescriptor getDescriptor() { @@ -788,7 +906,7 @@ public final class MasterProtos { getDescriptorForType() { return getDescriptor(); } - + public final void callMethod( com.google.protobuf.Descriptors.MethodDescriptor method, com.google.protobuf.RpcController controller, @@ -810,7 +928,7 @@ public final class MasterProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getRequestPrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -826,7 +944,7 @@ public final class MasterProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getResponsePrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -842,23 +960,23 @@ public final class MasterProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public static Stub newStub( com.google.protobuf.RpcChannel channel) { return new Stub(channel); } - + public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterService implements Interface { private Stub(com.google.protobuf.RpcChannel channel) { this.channel = channel; } - + private final com.google.protobuf.RpcChannel channel; - + public com.google.protobuf.RpcChannel getChannel() { return channel; } - + public void isMasterRunning( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest request, @@ -874,26 +992,26 @@ public final class MasterProtos { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.getDefaultInstance())); } } - + public static BlockingInterface newBlockingStub( com.google.protobuf.BlockingRpcChannel channel) { return new BlockingStub(channel); } - + public interface BlockingInterface { public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse isMasterRunning( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest request) throws com.google.protobuf.ServiceException; } - + private static final class BlockingStub implements BlockingInterface { private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { this.channel = channel; } - + private final com.google.protobuf.BlockingRpcChannel channel; - + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse isMasterRunning( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest request) @@ -904,10 +1022,12 @@ public final class MasterProtos { request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.getDefaultInstance()); } - + } + + // @@protoc_insertion_point(class_scope:MasterService) } - + private static com.google.protobuf.Descriptors.Descriptor internal_static_IsMasterRunningRequest_descriptor; private static @@ -918,7 +1038,7 @@ public final class MasterProtos { private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_IsMasterRunningResponse_fieldAccessorTable; - + public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; @@ -945,17 +1065,13 @@ public 
final class MasterProtos { internal_static_IsMasterRunningRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_IsMasterRunningRequest_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.Builder.class); + new java.lang.String[] { }); internal_static_IsMasterRunningResponse_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_IsMasterRunningResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_IsMasterRunningResponse_descriptor, - new java.lang.String[] { "IsMasterRunning", }, - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.Builder.class); + new java.lang.String[] { "IsMasterRunning", }); return null; } }; @@ -964,6 +1080,6 @@ public final class MasterProtos { new com.google.protobuf.Descriptors.FileDescriptor[] { }, assigner); } - + // @@protoc_insertion_point(outer_class_scope) } diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MultiRowMutation.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MultiRowMutation.java index 3227635..b4bd4f7 100644 --- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MultiRowMutation.java +++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MultiRowMutation.java @@ -10,66 +10,169 @@ public final class MultiRowMutation { } public interface MultiMutateRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // repeated .MutationProto mutationRequest = 1; + /** + * repeated .MutationProto mutationRequest = 1; + */ java.util.List getMutationRequestList(); + /** + * repeated .MutationProto mutationRequest = 1; + */ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutationRequest(int index); + /** + * repeated .MutationProto mutationRequest = 1; + */ int getMutationRequestCount(); + /** + * repeated .MutationProto mutationRequest = 1; + */ java.util.List getMutationRequestOrBuilderList(); + /** + * repeated .MutationProto mutationRequest = 1; + */ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationRequestOrBuilder( int index); } + /** + * Protobuf type {@code MultiMutateRequest} + */ public static final class MultiMutateRequest extends com.google.protobuf.GeneratedMessage implements MultiMutateRequestOrBuilder { // Use MultiMutateRequest.newBuilder() to construct. 
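A minimal sketch (editorial, not part of the patch): parseDelimitedFrom on the regenerated messages also routes through PARSER and returns null at end of stream, which keeps length-prefixed streaming loops simple. A round trip with the MultiMutateRequest defined below:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest;

class DelimitedStreamSketch {
  static int writeThenCount(int howMany) throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    for (int i = 0; i < howMany; i++) {
      // Each message is prefixed with its varint-encoded length.
      MultiMutateRequest.getDefaultInstance().writeDelimitedTo(out);
    }
    InputStream in = new ByteArrayInputStream(out.toByteArray());
    int n = 0;
    while (MultiMutateRequest.parseDelimitedFrom(in) != null) {
      n++;  // null signals a clean end of stream
    }
    return n;
  }
}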
- private MultiMutateRequest(Builder builder) { + private MultiMutateRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private MultiMutateRequest(boolean noInit) {} - + private MultiMutateRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final MultiMutateRequest defaultInstance; public static MultiMutateRequest getDefaultInstance() { return defaultInstance; } - + public MultiMutateRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private MultiMutateRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + mutationRequest_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + mutationRequest_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.PARSER, extensionRegistry)); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + mutationRequest_ = java.util.Collections.unmodifiableList(mutationRequest_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.class, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.Builder.class); } - + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public MultiMutateRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new MultiMutateRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + // repeated .MutationProto mutationRequest = 1; public static 
final int MUTATIONREQUEST_FIELD_NUMBER = 1; private java.util.List mutationRequest_; + /** + * repeated .MutationProto mutationRequest = 1; + */ public java.util.List getMutationRequestList() { return mutationRequest_; } + /** + * repeated .MutationProto mutationRequest = 1; + */ public java.util.List getMutationRequestOrBuilderList() { return mutationRequest_; } + /** + * repeated .MutationProto mutationRequest = 1; + */ public int getMutationRequestCount() { return mutationRequest_.size(); } + /** + * repeated .MutationProto mutationRequest = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutationRequest(int index) { return mutationRequest_.get(index); } + /** + * repeated .MutationProto mutationRequest = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationRequestOrBuilder( int index) { return mutationRequest_.get(index); } - + private void initFields() { mutationRequest_ = java.util.Collections.emptyList(); } @@ -77,7 +180,7 @@ public final class MultiRowMutation { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + for (int i = 0; i < getMutationRequestCount(); i++) { if (!getMutationRequest(i).isInitialized()) { memoizedIsInitialized = 0; @@ -87,7 +190,7 @@ public final class MultiRowMutation { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -96,12 +199,12 @@ public final class MultiRowMutation { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; for (int i = 0; i < mutationRequest_.size(); i++) { size += com.google.protobuf.CodedOutputStream @@ -111,14 +214,14 @@ public final class MultiRowMutation { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -128,7 +231,7 @@ public final class MultiRowMutation { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest other = (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest) obj; - + boolean result = true; result = result && getMutationRequestList() .equals(other.getMutationRequestList()); @@ -136,9 +239,13 @@ public final class MultiRowMutation { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (getMutationRequestCount() > 0) { @@ -146,89 +253,79 @@ public final class MultiRowMutation { hash = (53 * hash) + getMutationRequestList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return 
PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent 
parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code MultiMutateRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequestOrBuilder { @@ -236,18 +333,21 @@ public final class MultiRowMutation { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.class, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -259,7 +359,7 @@ public final class MultiRowMutation { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (mutationRequestBuilder_ == null) { @@ -270,20 +370,20 @@ public final class MultiRowMutation { } return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest build() { org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest result = buildPartial(); if (!result.isInitialized()) { @@ -291,17 +391,7 @@ public final class MultiRowMutation { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest result = new org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest(this); int from_bitField0_ = bitField0_; @@ -317,7 +407,7 @@ public final class MultiRowMutation { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof 
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest)other); @@ -326,7 +416,7 @@ public final class MultiRowMutation { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.getDefaultInstance()) return this; if (mutationRequestBuilder_ == null) { @@ -358,7 +448,7 @@ public final class MultiRowMutation { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { for (int i = 0; i < getMutationRequestCount(); i++) { if (!getMutationRequest(i).isInitialized()) { @@ -368,42 +458,26 @@ public final class MultiRowMutation { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addMutationRequest(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // repeated .MutationProto mutationRequest = 1; private java.util.List mutationRequest_ = java.util.Collections.emptyList(); @@ -413,10 +487,13 @@ public final class MultiRowMutation { bitField0_ |= 0x00000001; } } - + private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder> mutationRequestBuilder_; - + + /** + * repeated .MutationProto mutationRequest = 1; + */ public java.util.List getMutationRequestList() { if (mutationRequestBuilder_ == null) { return java.util.Collections.unmodifiableList(mutationRequest_); @@ -424,6 +501,9 @@ public final class MultiRowMutation { return mutationRequestBuilder_.getMessageList(); } } + /** + * repeated .MutationProto mutationRequest = 1; + */ public int getMutationRequestCount() { if (mutationRequestBuilder_ == null) { return mutationRequest_.size(); @@ -431,6 +511,9 @@ public final class MultiRowMutation { return mutationRequestBuilder_.getCount(); } } + /** + * repeated .MutationProto mutationRequest = 1; + */ public 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutationRequest(int index) { if (mutationRequestBuilder_ == null) { return mutationRequest_.get(index); @@ -438,6 +521,9 @@ public final class MultiRowMutation { return mutationRequestBuilder_.getMessage(index); } } + /** + * repeated .MutationProto mutationRequest = 1; + */ public Builder setMutationRequest( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto value) { if (mutationRequestBuilder_ == null) { @@ -452,6 +538,9 @@ public final class MultiRowMutation { } return this; } + /** + * repeated .MutationProto mutationRequest = 1; + */ public Builder setMutationRequest( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder builderForValue) { if (mutationRequestBuilder_ == null) { @@ -463,6 +552,9 @@ public final class MultiRowMutation { } return this; } + /** + * repeated .MutationProto mutationRequest = 1; + */ public Builder addMutationRequest(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto value) { if (mutationRequestBuilder_ == null) { if (value == null) { @@ -476,6 +568,9 @@ public final class MultiRowMutation { } return this; } + /** + * repeated .MutationProto mutationRequest = 1; + */ public Builder addMutationRequest( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto value) { if (mutationRequestBuilder_ == null) { @@ -490,6 +585,9 @@ public final class MultiRowMutation { } return this; } + /** + * repeated .MutationProto mutationRequest = 1; + */ public Builder addMutationRequest( org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder builderForValue) { if (mutationRequestBuilder_ == null) { @@ -501,6 +599,9 @@ public final class MultiRowMutation { } return this; } + /** + * repeated .MutationProto mutationRequest = 1; + */ public Builder addMutationRequest( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder builderForValue) { if (mutationRequestBuilder_ == null) { @@ -512,6 +613,9 @@ public final class MultiRowMutation { } return this; } + /** + * repeated .MutationProto mutationRequest = 1; + */ public Builder addAllMutationRequest( java.lang.Iterable values) { if (mutationRequestBuilder_ == null) { @@ -523,6 +627,9 @@ public final class MultiRowMutation { } return this; } + /** + * repeated .MutationProto mutationRequest = 1; + */ public Builder clearMutationRequest() { if (mutationRequestBuilder_ == null) { mutationRequest_ = java.util.Collections.emptyList(); @@ -533,6 +640,9 @@ public final class MultiRowMutation { } return this; } + /** + * repeated .MutationProto mutationRequest = 1; + */ public Builder removeMutationRequest(int index) { if (mutationRequestBuilder_ == null) { ensureMutationRequestIsMutable(); @@ -543,10 +653,16 @@ public final class MultiRowMutation { } return this; } + /** + * repeated .MutationProto mutationRequest = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder getMutationRequestBuilder( int index) { return getMutationRequestFieldBuilder().getBuilder(index); } + /** + * repeated .MutationProto mutationRequest = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationRequestOrBuilder( int index) { if (mutationRequestBuilder_ == null) { @@ -554,6 +670,9 @@ public final class MultiRowMutation { return mutationRequestBuilder_.getMessageOrBuilder(index); } } + /** + * repeated .MutationProto mutationRequest = 1; + 
*/ public java.util.List getMutationRequestOrBuilderList() { if (mutationRequestBuilder_ != null) { @@ -562,15 +681,24 @@ public final class MultiRowMutation { return java.util.Collections.unmodifiableList(mutationRequest_); } } + /** + * repeated .MutationProto mutationRequest = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder addMutationRequestBuilder() { return getMutationRequestFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance()); } + /** + * repeated .MutationProto mutationRequest = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder addMutationRequestBuilder( int index) { return getMutationRequestFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance()); } + /** + * repeated .MutationProto mutationRequest = 1; + */ public java.util.List getMutationRequestBuilderList() { return getMutationRequestFieldBuilder().getBuilderList(); @@ -589,84 +717,145 @@ public final class MultiRowMutation { } return mutationRequestBuilder_; } - + // @@protoc_insertion_point(builder_scope:MultiMutateRequest) } - + static { defaultInstance = new MultiMutateRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:MultiMutateRequest) } - + public interface MultiMutateResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code MultiMutateResponse} + */ public static final class MultiMutateResponse extends com.google.protobuf.GeneratedMessage implements MultiMutateResponseOrBuilder { // Use MultiMutateResponse.newBuilder() to construct. - private MultiMutateResponse(Builder builder) { + private MultiMutateResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private MultiMutateResponse(boolean noInit) {} - + private MultiMutateResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final MultiMutateResponse defaultInstance; public static MultiMutateResponse getDefaultInstance() { return defaultInstance; } - + public MultiMutateResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private MultiMutateResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor 
getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.class, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.Builder.class); } - + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public MultiMutateResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new MultiMutateResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -676,101 +865,95 @@ public final class MultiRowMutation { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse other = (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse) obj; - + boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static 
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code MultiMutateResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponseOrBuilder { @@ -778,18 +961,21 @@ public final class MultiRowMutation { getDescriptor() { return 
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.class, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -800,25 +986,25 @@ public final class MultiRowMutation { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse build() { org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse result = buildPartial(); if (!result.isInitialized()) { @@ -826,23 +1012,13 @@ public final class MultiRowMutation { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse result = new org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse(this); onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse)other); @@ -851,68 +1027,64 @@ public final class MultiRowMutation { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } - + 
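// ---------------------------------------------------------------------------
// Editor's note -- illustration only, not part of the regenerated sources. The
// pattern running through this diff is the protobuf 2.5 migration: the old
// newBuilder().mergeFrom(data).buildParsed() chain becomes a one-shot static
// PARSER, and unknown fields are captured eagerly at parse time and stored on
// the immutable message. A minimal round-trip sketch under those assumptions
// (the wrapping helper method is hypothetical):
public static void parserRoundTripSketch()
    throws com.google.protobuf.InvalidProtocolBufferException {
  // Serialize an (empty) response, then decode it through the 2.5-style PARSER.
  byte[] wire = org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation
      .MultiMutateResponse.getDefaultInstance().toByteArray();
  org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parsed =
      org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation
          .MultiMutateResponse.PARSER.parseFrom(wire);
  // Fields the reader does not recognize now survive the round trip on the
  // message itself rather than on a builder.
  System.out.println(parsed.getUnknownFields());
}
// ---------------------------------------------------------------------------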
public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - - + // @@protoc_insertion_point(builder_scope:MultiMutateResponse) } - + static { defaultInstance = new MultiMutateResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:MultiMutateResponse) } - + + /** + * Protobuf service {@code MultiRowMutationService} + */ public static abstract class MultiRowMutationService implements com.google.protobuf.Service { protected MultiRowMutationService() {} - + public interface Interface { + /** + * rpc mutateRows(.MultiMutateRequest) returns (.MultiMutateResponse); + */ public abstract void mutateRows( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest request, com.google.protobuf.RpcCallback done); - + } - + public static com.google.protobuf.Service newReflectiveService( final Interface impl) { return new MultiRowMutationService() { @@ -923,10 +1095,10 @@ public final class MultiRowMutation { com.google.protobuf.RpcCallback done) { impl.mutateRows(controller, request, done); } - + }; } - + public static com.google.protobuf.BlockingService newReflectiveBlockingService(final BlockingInterface impl) { return new com.google.protobuf.BlockingService() { @@ -934,7 +1106,7 @@ public final class MultiRowMutation { getDescriptorForType() { return getDescriptor(); } - + public final com.google.protobuf.Message callBlockingMethod( com.google.protobuf.Descriptors.MethodDescriptor method, com.google.protobuf.RpcController controller, @@ -952,7 +1124,7 @@ public final class MultiRowMutation { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getRequestPrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -968,7 +1140,7 @@ public final class MultiRowMutation { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getResponsePrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -984,15 +1156,18 @@ public final class MultiRowMutation { throw new java.lang.AssertionError("Can't get here."); } } - + }; } - + + /** + * rpc mutateRows(.MultiMutateRequest) returns (.MultiMutateResponse); + */ public abstract void mutateRows( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest request, 
com.google.protobuf.RpcCallback done); - + public static final com.google.protobuf.Descriptors.ServiceDescriptor getDescriptor() { @@ -1002,7 +1177,7 @@ public final class MultiRowMutation { getDescriptorForType() { return getDescriptor(); } - + public final void callMethod( com.google.protobuf.Descriptors.MethodDescriptor method, com.google.protobuf.RpcController controller, @@ -1024,7 +1199,7 @@ public final class MultiRowMutation { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getRequestPrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -1040,7 +1215,7 @@ public final class MultiRowMutation { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getResponsePrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -1056,23 +1231,23 @@ public final class MultiRowMutation { throw new java.lang.AssertionError("Can't get here."); } } - + public static Stub newStub( com.google.protobuf.RpcChannel channel) { return new Stub(channel); } - + public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiRowMutationService implements Interface { private Stub(com.google.protobuf.RpcChannel channel) { this.channel = channel; } - + private final com.google.protobuf.RpcChannel channel; - + public com.google.protobuf.RpcChannel getChannel() { return channel; } - + public void mutateRows( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest request, @@ -1088,26 +1263,26 @@ public final class MultiRowMutation { org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.getDefaultInstance())); } } - + public static BlockingInterface newBlockingStub( com.google.protobuf.BlockingRpcChannel channel) { return new BlockingStub(channel); } - + public interface BlockingInterface { public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse mutateRows( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest request) throws com.google.protobuf.ServiceException; } - + private static final class BlockingStub implements BlockingInterface { private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { this.channel = channel; } - + private final com.google.protobuf.BlockingRpcChannel channel; - + public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse mutateRows( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest request) @@ -1118,10 +1293,12 @@ public final class MultiRowMutation { request, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.getDefaultInstance()); } - + } + + // @@protoc_insertion_point(class_scope:MultiRowMutationService) } - + private static com.google.protobuf.Descriptors.Descriptor internal_static_MultiMutateRequest_descriptor; private static @@ -1132,7 +1309,7 @@ public final class MultiRowMutation { private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_MultiMutateResponse_fieldAccessorTable; - + public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; @@ -1159,17 +1336,13 @@ public final class MultiRowMutation { internal_static_MultiMutateRequest_fieldAccessorTable = new 
com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_MultiMutateRequest_descriptor, - new java.lang.String[] { "MutationRequest", }, - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.Builder.class); + new java.lang.String[] { "MutationRequest", }); internal_static_MultiMutateResponse_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_MultiMutateResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_MultiMutateResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.Builder.class); + new java.lang.String[] { }); return null; } }; @@ -1179,6 +1352,6 @@ public final class MultiRowMutation { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor(), }, assigner); } - + // @@protoc_insertion_point(outer_class_scope) } diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MultiRowMutationProcessorProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MultiRowMutationProcessorProtos.java index 18582d9..91c74bb 100644 --- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MultiRowMutationProcessorProtos.java +++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MultiRowMutationProcessorProtos.java @@ -11,69 +11,130 @@ public final class MultiRowMutationProcessorProtos { public interface MultiRowMutationProcessorRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code MultiRowMutationProcessorRequest} + */ public static final class MultiRowMutationProcessorRequest extends com.google.protobuf.GeneratedMessage implements MultiRowMutationProcessorRequestOrBuilder { // Use MultiRowMutationProcessorRequest.newBuilder() to construct. 
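// ---------------------------------------------------------------------------
// Editor's note -- sketch, not generated code. Each regenerated message now
// parses itself in a private (CodedInputStream, ExtensionRegistryLite)
// constructor, as in the hunk below: a readTag() loop routes unrecognized tags
// to parseUnknownField() and, on failure, attaches whatever was decoded via
// setUnfinishedMessage(). That is what lets the rewritten Builder.mergeFrom
// recover state from a bad stream; callers can reach it roughly like this
// (helper name and byte[] argument are assumptions for illustration):
public static void unfinishedMessageSketch(byte[] possiblyTruncated) {
  try {
    org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos
        .MultiRowMutationProcessorRequest.PARSER.parseFrom(possiblyTruncated);
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    // getUnfinishedMessage() is new in protobuf 2.5; it may be null when
    // nothing at all was decoded before the failure.
    com.google.protobuf.MessageLite partial = e.getUnfinishedMessage();
    if (partial != null) {
      System.out.println("partially decoded, size=" + partial.getSerializedSize());
    }
  }
}
// ---------------------------------------------------------------------------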
- private MultiRowMutationProcessorRequest(Builder builder) { + private MultiRowMutationProcessorRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private MultiRowMutationProcessorRequest(boolean noInit) {} - + private MultiRowMutationProcessorRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final MultiRowMutationProcessorRequest defaultInstance; public static MultiRowMutationProcessorRequest getDefaultInstance() { return defaultInstance; } - + public MultiRowMutationProcessorRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private MultiRowMutationProcessorRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.class, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public MultiRowMutationProcessorRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new MultiRowMutationProcessorRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public 
void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -83,101 +144,95 @@ public final class MultiRowMutationProcessorProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest other = (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest) obj; - + boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static 
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code MultiRowMutationProcessorRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequestOrBuilder { @@ -185,18 +240,21 @@ public final class MultiRowMutationProcessorProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.class, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent 
parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -207,25 +265,25 @@ public final class MultiRowMutationProcessorProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest build() { org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest result = buildPartial(); if (!result.isInitialized()) { @@ -233,23 +291,13 @@ public final class MultiRowMutationProcessorProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest result = new org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest(this); onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest)other); @@ -258,122 +306,173 @@ public final class MultiRowMutationProcessorProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - 
this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - - + // @@protoc_insertion_point(builder_scope:MultiRowMutationProcessorRequest) } - + static { defaultInstance = new MultiRowMutationProcessorRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:MultiRowMutationProcessorRequest) } - + public interface MultiRowMutationProcessorResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code MultiRowMutationProcessorResponse} + */ public static final class MultiRowMutationProcessorResponse extends com.google.protobuf.GeneratedMessage implements MultiRowMutationProcessorResponseOrBuilder { // Use MultiRowMutationProcessorResponse.newBuilder() to construct. - private MultiRowMutationProcessorResponse(Builder builder) { + private MultiRowMutationProcessorResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private MultiRowMutationProcessorResponse(boolean noInit) {} - + private MultiRowMutationProcessorResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final MultiRowMutationProcessorResponse defaultInstance; public static MultiRowMutationProcessorResponse getDefaultInstance() { return defaultInstance; } - + public MultiRowMutationProcessorResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private MultiRowMutationProcessorResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return 
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.class, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public MultiRowMutationProcessorResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new MultiRowMutationProcessorResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -383,101 +482,95 @@ public final class MultiRowMutationProcessorProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse other = (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse) obj; - + boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseFrom( com.google.protobuf.ByteString data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + 
@java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code MultiRowMutationProcessorResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponseOrBuilder { @@ -485,18 +578,21 @@ public final class MultiRowMutationProcessorProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.class, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -507,25 +603,25 @@ public final class MultiRowMutationProcessorProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse build() { org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse result = buildPartial(); if (!result.isInitialized()) { @@ -533,23 +629,13 @@ public final class MultiRowMutationProcessorProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return 
result; - } - + public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse result = new org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse(this); onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse)other); @@ -558,56 +644,46 @@ public final class MultiRowMutationProcessorProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - - + // @@protoc_insertion_point(builder_scope:MultiRowMutationProcessorResponse) } - + static { defaultInstance = new MultiRowMutationProcessorResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:MultiRowMutationProcessorResponse) } - + private static com.google.protobuf.Descriptors.Descriptor internal_static_MultiRowMutationProcessorRequest_descriptor; private static @@ -618,7 +694,7 @@ public final class MultiRowMutationProcessorProtos { private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_MultiRowMutationProcessorResponse_fieldAccessorTable; - + public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; @@ -643,17 +719,13 @@ public final class MultiRowMutationProcessorProtos { internal_static_MultiRowMutationProcessorRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_MultiRowMutationProcessorRequest_descriptor, - new java.lang.String[] { }, - 
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.Builder.class); + new java.lang.String[] { }); internal_static_MultiRowMutationProcessorResponse_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_MultiRowMutationProcessorResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_MultiRowMutationProcessorResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.Builder.class); + new java.lang.String[] { }); return null; } }; @@ -662,6 +734,6 @@ public final class MultiRowMutationProcessorProtos { new com.google.protobuf.Descriptors.FileDescriptor[] { }, assigner); } - + // @@protoc_insertion_point(outer_class_scope) } diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java index b94ef49..4711756 100644 --- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java +++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java @@ -10,108 +10,228 @@ public final class RPCProtos { } public interface UserInformationOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required string effectiveUser = 1; + /** + * required string effectiveUser = 1; + */ boolean hasEffectiveUser(); - String getEffectiveUser(); - + /** + * required string effectiveUser = 1; + */ + java.lang.String getEffectiveUser(); + /** + * required string effectiveUser = 1; + */ + com.google.protobuf.ByteString + getEffectiveUserBytes(); + // optional string realUser = 2; + /** + * optional string realUser = 2; + */ boolean hasRealUser(); - String getRealUser(); + /** + * optional string realUser = 2; + */ + java.lang.String getRealUser(); + /** + * optional string realUser = 2; + */ + com.google.protobuf.ByteString + getRealUserBytes(); } + /** + * Protobuf type {@code UserInformation} + * + *
+   * User Information proto.  Included in ConnectionHeader on connection setup
+   * </pre>
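Editor's note: the hunks around this comment swap protobuf 2.4's builder-driven parsing (`newBuilder().mergeFrom(...).buildParsed()`) for the static `PARSER` that protobuf 2.5 generates. A minimal round-trip sketch against the regenerated class (the user names are made-up values):

```java
import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation;

final class UserInformationRoundTrip {
  public static void main(String[] args) throws Exception {
    // effectiveUser is required; realUser is optional.
    UserInformation info = UserInformation.newBuilder()
        .setEffectiveUser("alice") // made-up value
        .setRealUser("hbase")      // made-up value
        .build();

    byte[] wire = info.toByteArray();

    // 2.5 path: the static PARSER replaces newBuilder().mergeFrom(wire).buildParsed().
    UserInformation parsed = UserInformation.PARSER.parseFrom(wire);
    System.out.println(parsed.getEffectiveUser()); // prints "alice"
  }
}
```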
+ */ public static final class UserInformation extends com.google.protobuf.GeneratedMessage implements UserInformationOrBuilder { // Use UserInformation.newBuilder() to construct. - private UserInformation(Builder builder) { + private UserInformation(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private UserInformation(boolean noInit) {} - + private UserInformation(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final UserInformation defaultInstance; public static UserInformation getDefaultInstance() { return defaultInstance; } - + public UserInformation getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private UserInformation( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + effectiveUser_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + realUser_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_UserInformation_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_UserInformation_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_UserInformation_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public UserInformation parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new UserInformation(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required string effectiveUser = 1; public static final int EFFECTIVEUSER_FIELD_NUMBER = 1; private java.lang.Object effectiveUser_; + /** + * required string effectiveUser = 1; + */ public boolean 
hasEffectiveUser() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getEffectiveUser() { + /** + * required string effectiveUser = 1; + */ + public java.lang.String getEffectiveUser() { java.lang.Object ref = effectiveUser_; - if (ref instanceof String) { - return (String) ref; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { effectiveUser_ = s; } return s; } } - private com.google.protobuf.ByteString getEffectiveUserBytes() { + /** + * required string effectiveUser = 1; + */ + public com.google.protobuf.ByteString + getEffectiveUserBytes() { java.lang.Object ref = effectiveUser_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); effectiveUser_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + // optional string realUser = 2; public static final int REALUSER_FIELD_NUMBER = 2; private java.lang.Object realUser_; + /** + * optional string realUser = 2; + */ public boolean hasRealUser() { return ((bitField0_ & 0x00000002) == 0x00000002); } - public String getRealUser() { + /** + * optional string realUser = 2; + */ + public java.lang.String getRealUser() { java.lang.Object ref = realUser_; - if (ref instanceof String) { - return (String) ref; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { realUser_ = s; } return s; } } - private com.google.protobuf.ByteString getRealUserBytes() { + /** + * optional string realUser = 2; + */ + public com.google.protobuf.ByteString + getRealUserBytes() { java.lang.Object ref = realUser_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); realUser_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + private void initFields() { effectiveUser_ = ""; realUser_ = ""; @@ -120,7 +240,7 @@ public final class RPCProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasEffectiveUser()) { memoizedIsInitialized = 0; return false; @@ -128,7 +248,7 @@ public final class RPCProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -140,12 +260,12 @@ public final class RPCProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -159,14 +279,14 @@ public final class RPCProtos { memoizedSerializedSize = size; return size; } - + private static final long 
serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -176,7 +296,7 @@ public final class RPCProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation) obj; - + boolean result = true; result = result && (hasEffectiveUser() == other.hasEffectiveUser()); if (hasEffectiveUser()) { @@ -192,9 +312,13 @@ public final class RPCProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasEffectiveUser()) { @@ -206,89 +330,83 @@ public final class RPCProtos { hash = (53 * hash) + getRealUser().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) 
throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code UserInformation} + * + *
+     * User Information proto.  Included in ConnectionHeader on connection setup
+     * </pre>
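Editor's note: further down, this Builder's `mergeFrom(CodedInputStream)` loses its hand-rolled tag loop and delegates to `PARSER.parsePartialFrom`, recovering the partially-read message from `InvalidProtocolBufferException.getUnfinishedMessage()` on failure. A sketch contrasting the two call styles, assuming `wire` holds a valid serialized `UserInformation`:

```java
import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation;

final class ParseStyles {
  static UserInformation parse(byte[] wire) throws InvalidProtocolBufferException {
    // protobuf 2.4 style (the removed lines): merge into a fresh builder, then build.
    UserInformation viaBuilder = UserInformation.newBuilder().mergeFrom(wire).build();

    // protobuf 2.5 style (the added lines): a single parser call; the Builder's
    // mergeFrom(CodedInputStream) now routes through PARSER.parsePartialFrom too.
    UserInformation viaParser = UserInformation.PARSER.parseFrom(wire);

    assert viaBuilder.equals(viaParser);
    return viaParser;
  }
}
```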
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder { @@ -296,18 +414,21 @@ public final class RPCProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_UserInformation_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_UserInformation_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_UserInformation_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -318,7 +439,7 @@ public final class RPCProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); effectiveUser_ = ""; @@ -327,20 +448,20 @@ public final class RPCProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_UserInformation_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation build() { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation result = buildPartial(); if (!result.isInitialized()) { @@ -348,17 +469,7 @@ public final class RPCProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation buildPartial() { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation(this); int from_bitField0_ = bitField0_; @@ -375,7 +486,7 @@ public final class RPCProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation)other); @@ -384,19 +495,23 @@ public final class RPCProtos { return this; } } - + public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation other) { if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance()) return this; if (other.hasEffectiveUser()) { - setEffectiveUser(other.getEffectiveUser()); + bitField0_ |= 0x00000001; + effectiveUser_ = other.effectiveUser_; + onChanged(); } if (other.hasRealUser()) { - setRealUser(other.getRealUser()); + bitField0_ |= 0x00000002; + realUser_ = other.realUser_; + onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasEffectiveUser()) { @@ -404,62 +519,69 @@ public final class RPCProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - effectiveUser_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - realUser_ = input.readBytes(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required string effectiveUser = 1; private java.lang.Object effectiveUser_ = ""; + /** + * required string effectiveUser = 1; + */ public boolean hasEffectiveUser() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getEffectiveUser() { + /** + * required string effectiveUser = 1; + */ + public java.lang.String getEffectiveUser() { java.lang.Object ref = effectiveUser_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); effectiveUser_ = s; return s; } else { - return (String) ref; + return (java.lang.String) ref; + } + } + /** + * required string effectiveUser = 1; + */ + public com.google.protobuf.ByteString + getEffectiveUserBytes() { + java.lang.Object ref = effectiveUser_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + effectiveUser_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; } } - public Builder setEffectiveUser(String value) { + /** + * required string effectiveUser = 1; + */ + public Builder setEffectiveUser( + java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ -468,34 +590,72 @@ public final class RPCProtos { onChanged(); return this; } + /** + * required string effectiveUser = 1; + */ public Builder clearEffectiveUser() { bitField0_ = (bitField0_ 
& ~0x00000001); effectiveUser_ = getDefaultInstance().getEffectiveUser(); onChanged(); return this; } - void setEffectiveUser(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; + /** + * required string effectiveUser = 1; + */ + public Builder setEffectiveUserBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; effectiveUser_ = value; onChanged(); + return this; } - + // optional string realUser = 2; private java.lang.Object realUser_ = ""; + /** + * optional string realUser = 2; + */ public boolean hasRealUser() { return ((bitField0_ & 0x00000002) == 0x00000002); } - public String getRealUser() { + /** + * optional string realUser = 2; + */ + public java.lang.String getRealUser() { java.lang.Object ref = realUser_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); realUser_ = s; return s; } else { - return (String) ref; + return (java.lang.String) ref; + } + } + /** + * optional string realUser = 2; + */ + public com.google.protobuf.ByteString + getRealUserBytes() { + java.lang.Object ref = realUser_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + realUser_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; } } - public Builder setRealUser(String value) { + /** + * optional string realUser = 2; + */ + public Builder setRealUser( + java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ -504,187 +664,436 @@ public final class RPCProtos { onChanged(); return this; } + /** + * optional string realUser = 2; + */ public Builder clearRealUser() { bitField0_ = (bitField0_ & ~0x00000002); realUser_ = getDefaultInstance().getRealUser(); onChanged(); return this; } - void setRealUser(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000002; + /** + * optional string realUser = 2; + */ + public Builder setRealUserBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; realUser_ = value; onChanged(); + return this; } - + // @@protoc_insertion_point(builder_scope:UserInformation) } - + static { defaultInstance = new UserInformation(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:UserInformation) } - + public interface ConnectionHeaderOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // optional .UserInformation userInfo = 1; + /** + * optional .UserInformation userInfo = 1; + */ boolean hasUserInfo(); + /** + * optional .UserInformation userInfo = 1; + */ org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation getUserInfo(); + /** + * optional .UserInformation userInfo = 1; + */ org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder(); - + // optional string protocol = 2 [default = "org.apache.hadoop.hbase.client.ClientProtocol"]; + /** + * optional string protocol = 2 [default = "org.apache.hadoop.hbase.client.ClientProtocol"]; + */ boolean hasProtocol(); - String getProtocol(); - + /** + * optional string protocol = 2 [default = "org.apache.hadoop.hbase.client.ClientProtocol"]; + */ + java.lang.String getProtocol(); + /** + * optional string protocol = 2 [default = 
"org.apache.hadoop.hbase.client.ClientProtocol"]; + */ + com.google.protobuf.ByteString + getProtocolBytes(); + // optional string cellBlockCodecClass = 3 [default = "org.apache.hadoop.hbase.codec.KeyValueCodec"]; + /** + * optional string cellBlockCodecClass = 3 [default = "org.apache.hadoop.hbase.codec.KeyValueCodec"]; + * + *
+     * Cell block codec we will use sending over optional cell blocks.  Server throws exception
+     * if cannot deal.
+     * </pre>
+ */ boolean hasCellBlockCodecClass(); - String getCellBlockCodecClass(); - + /** + * optional string cellBlockCodecClass = 3 [default = "org.apache.hadoop.hbase.codec.KeyValueCodec"]; + * + *
+     * Cell block codec we will use sending over optional cell blocks.  Server throws exception
+     * if cannot deal.
+     * </pre>
+ */ + java.lang.String getCellBlockCodecClass(); + /** + * optional string cellBlockCodecClass = 3 [default = "org.apache.hadoop.hbase.codec.KeyValueCodec"]; + * + *
+     * Cell block codec we will use sending over optional cell blocks.  Server throws exception
+     * if cannot deal.
+     * </pre>
+ */ + com.google.protobuf.ByteString + getCellBlockCodecClassBytes(); + // optional string cellBlockCompressorClass = 4; + /** + * optional string cellBlockCompressorClass = 4; + * + *
+     * Compressor we will use if cell block is compressed.  Server will throw exception if not supported.
+     * Class must implement hadoop's CompressionCodec Interface
+     * </pre>
+ */ boolean hasCellBlockCompressorClass(); - String getCellBlockCompressorClass(); + /** + * optional string cellBlockCompressorClass = 4; + * + *
+     * Compressor we will use if cell block is compressed.  Server will throw exception if not supported.
+     * Class must implement hadoop's CompressionCodec Interface
+     * </pre>
+ */ + java.lang.String getCellBlockCompressorClass(); + /** + * optional string cellBlockCompressorClass = 4; + * + *
+     * Compressor we will use if cell block is compressed.  Server will throw exception if not supported.
+     * Class must implement hadoop's CompressionCodec Interface
+     * </pre>
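Editor's note: these two header fields carry class names the server must be able to instantiate, or it rejects the connection. A hedged sketch of filling them in; `GzipCodec` is only an illustrative hadoop `CompressionCodec`, not something this patch prescribes:

```java
import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader;

final class CellBlockSetup {
  static ConnectionHeader buildHeader() {
    return ConnectionHeader.newBuilder()
        // Spelled out even though it matches the proto default above.
        .setCellBlockCodecClass("org.apache.hadoop.hbase.codec.KeyValueCodec")
        // Illustrative only; any class implementing hadoop's CompressionCodec works.
        .setCellBlockCompressorClass("org.apache.hadoop.io.compress.GzipCodec")
        .build();
  }
}
```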
+ */ + com.google.protobuf.ByteString + getCellBlockCompressorClassBytes(); } + /** + * Protobuf type {@code ConnectionHeader} + * + *
+   * This is sent on connection setup after the connection preamble is sent.
+   * </pre>
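Editor's note: because `protocol` and `cellBlockCodecClass` declare proto defaults, a header that only sets `userInfo` still returns usable values from its getters while the corresponding `has` methods stay false. A minimal sketch (the user name is made up):

```java
import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader;
import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation;

final class HeaderDefaults {
  public static void main(String[] args) {
    ConnectionHeader header = ConnectionHeader.newBuilder()
        .setUserInfo(UserInformation.newBuilder().setEffectiveUser("alice")) // made-up user
        .build();

    System.out.println(header.hasProtocol()); // false: never explicitly set
    System.out.println(header.getProtocol()); // "org.apache.hadoop.hbase.client.ClientProtocol"
    System.out.println(header.getCellBlockCodecClass()); // "org.apache.hadoop.hbase.codec.KeyValueCodec"
  }
}
```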
+ */ public static final class ConnectionHeader extends com.google.protobuf.GeneratedMessage implements ConnectionHeaderOrBuilder { // Use ConnectionHeader.newBuilder() to construct. - private ConnectionHeader(Builder builder) { + private ConnectionHeader(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private ConnectionHeader(boolean noInit) {} - + private ConnectionHeader(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final ConnectionHeader defaultInstance; public static ConnectionHeader getDefaultInstance() { return defaultInstance; } - + public ConnectionHeader getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ConnectionHeader( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = userInfo_.toBuilder(); + } + userInfo_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(userInfo_); + userInfo_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + case 18: { + bitField0_ |= 0x00000002; + protocol_ = input.readBytes(); + break; + } + case 26: { + bitField0_ |= 0x00000004; + cellBlockCodecClass_ = input.readBytes(); + break; + } + case 34: { + bitField0_ |= 0x00000008; + cellBlockCompressorClass_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_ConnectionHeader_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_ConnectionHeader_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_ConnectionHeader_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public 
ConnectionHeader parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ConnectionHeader(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // optional .UserInformation userInfo = 1; public static final int USERINFO_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation userInfo_; + /** + * optional .UserInformation userInfo = 1; + */ public boolean hasUserInfo() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional .UserInformation userInfo = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation getUserInfo() { return userInfo_; } + /** + * optional .UserInformation userInfo = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder() { return userInfo_; } - + // optional string protocol = 2 [default = "org.apache.hadoop.hbase.client.ClientProtocol"]; public static final int PROTOCOL_FIELD_NUMBER = 2; private java.lang.Object protocol_; + /** + * optional string protocol = 2 [default = "org.apache.hadoop.hbase.client.ClientProtocol"]; + */ public boolean hasProtocol() { return ((bitField0_ & 0x00000002) == 0x00000002); } - public String getProtocol() { + /** + * optional string protocol = 2 [default = "org.apache.hadoop.hbase.client.ClientProtocol"]; + */ + public java.lang.String getProtocol() { java.lang.Object ref = protocol_; - if (ref instanceof String) { - return (String) ref; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { protocol_ = s; } return s; } } - private com.google.protobuf.ByteString getProtocolBytes() { + /** + * optional string protocol = 2 [default = "org.apache.hadoop.hbase.client.ClientProtocol"]; + */ + public com.google.protobuf.ByteString + getProtocolBytes() { java.lang.Object ref = protocol_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); protocol_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + // optional string cellBlockCodecClass = 3 [default = "org.apache.hadoop.hbase.codec.KeyValueCodec"]; public static final int CELLBLOCKCODECCLASS_FIELD_NUMBER = 3; private java.lang.Object cellBlockCodecClass_; + /** + * optional string cellBlockCodecClass = 3 [default = "org.apache.hadoop.hbase.codec.KeyValueCodec"]; + * + *
+     * Cell block codec we will use sending over optional cell blocks.  Server throws exception
+     * if cannot deal.
+     * </pre>
+ */ public boolean hasCellBlockCodecClass() { return ((bitField0_ & 0x00000004) == 0x00000004); } - public String getCellBlockCodecClass() { + /** + * optional string cellBlockCodecClass = 3 [default = "org.apache.hadoop.hbase.codec.KeyValueCodec"]; + * + *
+     * Cell block codec we will use sending over optional cell blocks.  Server throws exception
+     * if cannot deal.
+     * </pre>
+ */ + public java.lang.String getCellBlockCodecClass() { java.lang.Object ref = cellBlockCodecClass_; - if (ref instanceof String) { - return (String) ref; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { cellBlockCodecClass_ = s; } return s; } } - private com.google.protobuf.ByteString getCellBlockCodecClassBytes() { + /** + * optional string cellBlockCodecClass = 3 [default = "org.apache.hadoop.hbase.codec.KeyValueCodec"]; + * + *
+     * Cell block codec we will use sending over optional cell blocks.  Server throws exception
+     * if cannot deal.
+     * </pre>
+ */ + public com.google.protobuf.ByteString + getCellBlockCodecClassBytes() { java.lang.Object ref = cellBlockCodecClass_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); cellBlockCodecClass_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + // optional string cellBlockCompressorClass = 4; public static final int CELLBLOCKCOMPRESSORCLASS_FIELD_NUMBER = 4; private java.lang.Object cellBlockCompressorClass_; + /** + * optional string cellBlockCompressorClass = 4; + * + *
+     * Compressor we will use if cell block is compressed.  Server will throw exception if not supported.
+     * Class must implement hadoop's CompressionCodec Interface
+     * </pre>
+ */ public boolean hasCellBlockCompressorClass() { return ((bitField0_ & 0x00000008) == 0x00000008); } - public String getCellBlockCompressorClass() { + /** + * optional string cellBlockCompressorClass = 4; + * + *
+     * Compressor we will use if cell block is compressed.  Server will throw exception if not supported.
+     * Class must implement hadoop's CompressionCodec Interface
+     * </pre>
+ */ + public java.lang.String getCellBlockCompressorClass() { java.lang.Object ref = cellBlockCompressorClass_; - if (ref instanceof String) { - return (String) ref; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { cellBlockCompressorClass_ = s; } return s; } } - private com.google.protobuf.ByteString getCellBlockCompressorClassBytes() { + /** + * optional string cellBlockCompressorClass = 4; + * + *
+     * Compressor we will use if cell block is compressed.  Server will throw exception if not supported.
+     * Class must implement hadoop's CompressionCodec Interface
+     * </pre>
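Editor's note: all of these string accessors follow one pattern. The field is held as a plain `Object`, and each accessor converts between `String` and UTF-8 `ByteString` on demand, caching the result so repeated reads in the same form are free. Roughly:

```java
import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation;

final class LazyStringDemo {
  public static void main(String[] args) {
    UserInformation u = UserInformation.newBuilder()
        .setEffectiveUser("alice") // stored internally as a java.lang.String
        .build();

    // Encodes to UTF-8 once and caches the ByteString in place of the String.
    ByteString raw = u.getEffectiveUserBytes();
    // Decodes the cached ByteString and caches the String form back again.
    String name = u.getEffectiveUser();
    System.out.println(name + " = " + raw.size() + " bytes");
  }
}
```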
+ */ + public com.google.protobuf.ByteString + getCellBlockCompressorClassBytes() { java.lang.Object ref = cellBlockCompressorClass_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); cellBlockCompressorClass_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + private void initFields() { userInfo_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); protocol_ = "org.apache.hadoop.hbase.client.ClientProtocol"; @@ -695,7 +1104,7 @@ public final class RPCProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (hasUserInfo()) { if (!getUserInfo().isInitialized()) { memoizedIsInitialized = 0; @@ -705,7 +1114,7 @@ public final class RPCProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -723,12 +1132,12 @@ public final class RPCProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -750,14 +1159,14 @@ public final class RPCProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -767,7 +1176,7 @@ public final class RPCProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader) obj; - + boolean result = true; result = result && (hasUserInfo() == other.hasUserInfo()); if (hasUserInfo()) { @@ -793,9 +1202,13 @@ public final class RPCProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasUserInfo()) { @@ -815,89 +1228,83 @@ public final class RPCProtos { hash = (53 * hash) + getCellBlockCompressorClass().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader 
parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code ConnectionHeader} + * + *
+     * This is sent on connection setup after the connection preamble is sent.
+     * </pre>
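Editor's note: the regenerated message also retains a concrete `unknownFields` set: the parse constructor above collects unrecognized tags, and `writeTo()` emits them again, so fields from a newer schema survive a round trip through this class. A sketch that fakes such a field by appending one varint by hand (field number 100 is arbitrary):

```java
import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader;

final class UnknownFieldRoundTrip {
  public static void main(String[] args) throws Exception {
    byte[] known = ConnectionHeader.newBuilder()
        .setProtocol("org.apache.hadoop.hbase.client.ClientProtocol")
        .build().toByteArray();

    // Append an unknown varint field: key (100 << 3 | 0) varint-encodes to A0 06, value 1.
    byte[] wire = new byte[known.length + 3];
    System.arraycopy(known, 0, wire, 0, known.length);
    wire[known.length] = (byte) 0xA0;
    wire[known.length + 1] = 0x06;
    wire[known.length + 2] = 0x01;

    ConnectionHeader parsed = ConnectionHeader.PARSER.parseFrom(wire);
    System.out.println(parsed.getUnknownFields().hasField(100));    // true: kept, not dropped
    System.out.println(parsed.toByteArray().length == wire.length); // true: re-serialized too
  }
}
```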
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderOrBuilder { @@ -905,18 +1312,21 @@ public final class RPCProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_ConnectionHeader_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_ConnectionHeader_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_ConnectionHeader_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -928,7 +1338,7 @@ public final class RPCProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (userInfoBuilder_ == null) { @@ -945,20 +1355,20 @@ public final class RPCProtos { bitField0_ = (bitField0_ & ~0x00000008); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_ConnectionHeader_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader build() { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader result = buildPartial(); if (!result.isInitialized()) { @@ -966,17 +1376,7 @@ public final class RPCProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader buildPartial() { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader(this); int from_bitField0_ = bitField0_; @@ -1005,7 +1405,7 @@ public final class RPCProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader)other); @@ -1014,25 +1414,31 @@ public final class RPCProtos { return this; } } - + public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader other) { if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader.getDefaultInstance()) return this; if (other.hasUserInfo()) { mergeUserInfo(other.getUserInfo()); } if (other.hasProtocol()) { - setProtocol(other.getProtocol()); + bitField0_ |= 0x00000002; + protocol_ = other.protocol_; + onChanged(); } if (other.hasCellBlockCodecClass()) { - setCellBlockCodecClass(other.getCellBlockCodecClass()); + bitField0_ |= 0x00000004; + cellBlockCodecClass_ = other.cellBlockCodecClass_; + onChanged(); } if (other.hasCellBlockCompressorClass()) { - setCellBlockCompressorClass(other.getCellBlockCompressorClass()); + bitField0_ |= 0x00000008; + cellBlockCompressorClass_ = other.cellBlockCompressorClass_; + onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (hasUserInfo()) { if (!getUserInfo().isInitialized()) { @@ -1042,67 +1448,39 @@ public final class RPCProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.newBuilder(); - if (hasUserInfo()) { - subBuilder.mergeFrom(getUserInfo()); - } - input.readMessage(subBuilder, extensionRegistry); - setUserInfo(subBuilder.buildPartial()); - break; - } - case 18: { - bitField0_ |= 0x00000002; - protocol_ = input.readBytes(); - break; - } - case 26: { - bitField0_ |= 0x00000004; - cellBlockCodecClass_ = input.readBytes(); - break; - } - case 34: { - bitField0_ |= 0x00000008; - cellBlockCompressorClass_ = input.readBytes(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // optional .UserInformation userInfo = 1; private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation userInfo_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder> userInfoBuilder_; + /** + * optional .UserInformation userInfo = 1; + */ public boolean hasUserInfo() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional .UserInformation userInfo = 1; + */ 
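
The regenerated Builder.mergeFrom above delegates to PARSER.parsePartialFrom and, in its finally block, merges whatever was decoded before rethrowing. A hedged sketch of what that buys a caller (input is an assumed CodedInputStream, extensionRegistry an assumed ExtensionRegistryLite):

    // Illustrative only: fields decoded before a truncated/corrupt tail survive.
    RPCProtos.ConnectionHeader.Builder b = RPCProtos.ConnectionHeader.newBuilder();
    try {
      b.mergeFrom(input, extensionRegistry);
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      // b already holds the fields parsed before the failure, courtesy of
      // the finally-block merge in the generated method above.
    }
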
public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation getUserInfo() { if (userInfoBuilder_ == null) { return userInfo_; @@ -1110,6 +1488,9 @@ public final class RPCProtos { return userInfoBuilder_.getMessage(); } } + /** + * optional .UserInformation userInfo = 1; + */ public Builder setUserInfo(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation value) { if (userInfoBuilder_ == null) { if (value == null) { @@ -1123,6 +1504,9 @@ public final class RPCProtos { bitField0_ |= 0x00000001; return this; } + /** + * optional .UserInformation userInfo = 1; + */ public Builder setUserInfo( org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder builderForValue) { if (userInfoBuilder_ == null) { @@ -1134,6 +1518,9 @@ public final class RPCProtos { bitField0_ |= 0x00000001; return this; } + /** + * optional .UserInformation userInfo = 1; + */ public Builder mergeUserInfo(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation value) { if (userInfoBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -1150,6 +1537,9 @@ public final class RPCProtos { bitField0_ |= 0x00000001; return this; } + /** + * optional .UserInformation userInfo = 1; + */ public Builder clearUserInfo() { if (userInfoBuilder_ == null) { userInfo_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); @@ -1160,11 +1550,17 @@ public final class RPCProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * optional .UserInformation userInfo = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder getUserInfoBuilder() { bitField0_ |= 0x00000001; onChanged(); return getUserInfoFieldBuilder().getBuilder(); } + /** + * optional .UserInformation userInfo = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder() { if (userInfoBuilder_ != null) { return userInfoBuilder_.getMessageOrBuilder(); @@ -1172,6 +1568,9 @@ public final class RPCProtos { return userInfo_; } } + /** + * optional .UserInformation userInfo = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder> getUserInfoFieldBuilder() { @@ -1185,23 +1584,50 @@ public final class RPCProtos { } return userInfoBuilder_; } - + // optional string protocol = 2 [default = "org.apache.hadoop.hbase.client.ClientProtocol"]; private java.lang.Object protocol_ = "org.apache.hadoop.hbase.client.ClientProtocol"; + /** + * optional string protocol = 2 [default = "org.apache.hadoop.hbase.client.ClientProtocol"]; + */ public boolean hasProtocol() { return ((bitField0_ & 0x00000002) == 0x00000002); } - public String getProtocol() { + /** + * optional string protocol = 2 [default = "org.apache.hadoop.hbase.client.ClientProtocol"]; + */ + public java.lang.String getProtocol() { java.lang.Object ref = protocol_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); protocol_ = s; return s; } else { - return (String) ref; + return (java.lang.String) ref; + } + } + /** + * optional string protocol = 2 [default = "org.apache.hadoop.hbase.client.ClientProtocol"]; + */ + public 
com.google.protobuf.ByteString + getProtocolBytes() { + java.lang.Object ref = protocol_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + protocol_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; } } - public Builder setProtocol(String value) { + /** + * optional string protocol = 2 [default = "org.apache.hadoop.hbase.client.ClientProtocol"]; + */ + public Builder setProtocol( + java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ -1210,34 +1636,92 @@ public final class RPCProtos { onChanged(); return this; } + /** + * optional string protocol = 2 [default = "org.apache.hadoop.hbase.client.ClientProtocol"]; + */ public Builder clearProtocol() { bitField0_ = (bitField0_ & ~0x00000002); protocol_ = getDefaultInstance().getProtocol(); onChanged(); return this; } - void setProtocol(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000002; + /** + * optional string protocol = 2 [default = "org.apache.hadoop.hbase.client.ClientProtocol"]; + */ + public Builder setProtocolBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; protocol_ = value; onChanged(); + return this; } - + // optional string cellBlockCodecClass = 3 [default = "org.apache.hadoop.hbase.codec.KeyValueCodec"]; private java.lang.Object cellBlockCodecClass_ = "org.apache.hadoop.hbase.codec.KeyValueCodec"; + /** + * optional string cellBlockCodecClass = 3 [default = "org.apache.hadoop.hbase.codec.KeyValueCodec"]; + * + *
+       * Cell block codec we will use when sending optional cell blocks.  Server throws an
+       * exception if it cannot deal with it.
+       * 
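
Since the codec travels as a fully qualified class name, the server presumably resolves it reflectively at connection setup; a speculative sketch with error handling elided (header is an assumed ConnectionHeader):

    // Illustrative only: how the server might "deal" with the advertised codec.
    String codecClassName = header.getCellBlockCodecClass();
    Object codec = Class.forName(codecClassName).newInstance();  // setup fails if unknown
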
+ */ public boolean hasCellBlockCodecClass() { return ((bitField0_ & 0x00000004) == 0x00000004); } - public String getCellBlockCodecClass() { + /** + * optional string cellBlockCodecClass = 3 [default = "org.apache.hadoop.hbase.codec.KeyValueCodec"]; + * + *
+       * Cell block codec we will use when sending optional cell blocks.  Server throws an
+       * exception if it cannot deal with it.
+       * 
+ */ + public java.lang.String getCellBlockCodecClass() { java.lang.Object ref = cellBlockCodecClass_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); cellBlockCodecClass_ = s; return s; } else { - return (String) ref; - } - } - public Builder setCellBlockCodecClass(String value) { + return (java.lang.String) ref; + } + } + /** + * optional string cellBlockCodecClass = 3 [default = "org.apache.hadoop.hbase.codec.KeyValueCodec"]; + * + *
+       * Cell block codec we will use when sending optional cell blocks.  Server throws an
+       * exception if it cannot deal with it.
+       * 
+ */ + public com.google.protobuf.ByteString + getCellBlockCodecClassBytes() { + java.lang.Object ref = cellBlockCodecClass_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + cellBlockCodecClass_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string cellBlockCodecClass = 3 [default = "org.apache.hadoop.hbase.codec.KeyValueCodec"]; + * + *
+       * Cell block codec we will use when sending optional cell blocks.  Server throws an
+       * exception if it cannot deal with it.
+       * 
+ */ + public Builder setCellBlockCodecClass( + java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ -1246,34 +1730,102 @@ public final class RPCProtos { onChanged(); return this; } + /** + * optional string cellBlockCodecClass = 3 [default = "org.apache.hadoop.hbase.codec.KeyValueCodec"]; + * + *
+       * Cell block codec we will use when sending optional cell blocks.  Server throws an
+       * exception if it cannot deal with it.
+       * 
+ */ public Builder clearCellBlockCodecClass() { bitField0_ = (bitField0_ & ~0x00000004); cellBlockCodecClass_ = getDefaultInstance().getCellBlockCodecClass(); onChanged(); return this; } - void setCellBlockCodecClass(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000004; + /** + * optional string cellBlockCodecClass = 3 [default = "org.apache.hadoop.hbase.codec.KeyValueCodec"]; + * + *
+       * Cell block codec we will use when sending optional cell blocks.  Server throws an
+       * exception if it cannot deal with it.
+       * 
+ */ + public Builder setCellBlockCodecClassBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; cellBlockCodecClass_ = value; onChanged(); + return this; } - + // optional string cellBlockCompressorClass = 4; private java.lang.Object cellBlockCompressorClass_ = ""; + /** + * optional string cellBlockCompressorClass = 4; + * + *
+       * Compressor we will use if the cell block is compressed.  Server will throw an exception
+       * if it is not supported.  The class must implement Hadoop's CompressionCodec interface.
+       * 
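
Because the compressor must implement Hadoop's CompressionCodec, the server can lean on Hadoop's stock reflection utilities to instantiate it; a hedged sketch with error handling elided, assuming a Configuration is at hand:

    // Illustrative only: resolve the advertised compressor by class name.
    org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration();
    org.apache.hadoop.io.compress.CompressionCodec compressor =
        (org.apache.hadoop.io.compress.CompressionCodec) org.apache.hadoop.util.ReflectionUtils
            .newInstance(conf.getClassByName(header.getCellBlockCompressorClass()), conf);
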
+ */ public boolean hasCellBlockCompressorClass() { return ((bitField0_ & 0x00000008) == 0x00000008); } - public String getCellBlockCompressorClass() { + /** + * optional string cellBlockCompressorClass = 4; + * + *
+       * Compressor we will use if the cell block is compressed.  Server will throw an exception
+       * if it is not supported.  The class must implement Hadoop's CompressionCodec interface.
+       * 
+ */ + public java.lang.String getCellBlockCompressorClass() { java.lang.Object ref = cellBlockCompressorClass_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); cellBlockCompressorClass_ = s; return s; } else { - return (String) ref; - } - } - public Builder setCellBlockCompressorClass(String value) { + return (java.lang.String) ref; + } + } + /** + * optional string cellBlockCompressorClass = 4; + * + *
+       * Compressor we will use if the cell block is compressed.  Server will throw an exception
+       * if it is not supported.  The class must implement Hadoop's CompressionCodec interface.
+       * 
+ */ + public com.google.protobuf.ByteString + getCellBlockCompressorClassBytes() { + java.lang.Object ref = cellBlockCompressorClass_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + cellBlockCompressorClass_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string cellBlockCompressorClass = 4; + * + *
+       * Compressor we will use if the cell block is compressed.  Server will throw an exception
+       * if it is not supported.  The class must implement Hadoop's CompressionCodec interface.
+       * 
+ */ + public Builder setCellBlockCompressorClass( + java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ -1282,75 +1834,195 @@ public final class RPCProtos { onChanged(); return this; } + /** + * optional string cellBlockCompressorClass = 4; + * + *
+       * Compressor we will use if the cell block is compressed.  Server will throw an exception
+       * if it is not supported.  The class must implement Hadoop's CompressionCodec interface.
+       * 
+ */ public Builder clearCellBlockCompressorClass() { bitField0_ = (bitField0_ & ~0x00000008); cellBlockCompressorClass_ = getDefaultInstance().getCellBlockCompressorClass(); onChanged(); return this; } - void setCellBlockCompressorClass(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000008; + /** + * optional string cellBlockCompressorClass = 4; + * + *
+       * Compressor we will use if the cell block is compressed.  Server will throw an exception
+       * if it is not supported.  The class must implement Hadoop's CompressionCodec interface.
+       * 
+ */ + public Builder setCellBlockCompressorClassBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000008; cellBlockCompressorClass_ = value; onChanged(); + return this; } - + // @@protoc_insertion_point(builder_scope:ConnectionHeader) } - + static { defaultInstance = new ConnectionHeader(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:ConnectionHeader) } - + public interface CellBlockMetaOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // optional uint32 length = 1; + /** + * optional uint32 length = 1; + * + *
+     * Length of the following cell block.  Could calculate it, but it is convenient to have it to hand.
+     * 
+ */ boolean hasLength(); + /** + * optional uint32 length = 1; + * + *
+     * Length of the following cell block.  Could calculate it, but it is convenient to have it to hand.
+     * 
+ */ int getLength(); } + /** + * Protobuf type {@code CellBlockMeta} + * + *
+   * Optional cell block message.  Included in the client RequestHeader.
+   * 
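
A hedged sketch of both ends of this one-field message: the sender records the size of the cell block it is about to write, and the receiver uses it for a single exact read (cellBlockBytes and dataInput are assumed to be in scope):

    // Illustrative only. Sender side:
    RPCProtos.CellBlockMeta meta = RPCProtos.CellBlockMeta.newBuilder()
        .setLength(cellBlockBytes.length)
        .build();
    // Receiver side: size the buffer from the advertised length.
    byte[] cellBlock = new byte[meta.getLength()];
    dataInput.readFully(cellBlock);
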
+ */ public static final class CellBlockMeta extends com.google.protobuf.GeneratedMessage implements CellBlockMetaOrBuilder { // Use CellBlockMeta.newBuilder() to construct. - private CellBlockMeta(Builder builder) { + private CellBlockMeta(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private CellBlockMeta(boolean noInit) {} - + private CellBlockMeta(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final CellBlockMeta defaultInstance; public static CellBlockMeta getDefaultInstance() { return defaultInstance; } - + public CellBlockMeta getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private CellBlockMeta( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + length_ = input.readUInt32(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_CellBlockMeta_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_CellBlockMeta_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_CellBlockMeta_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.Builder.class); } - + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public CellBlockMeta parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CellBlockMeta(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + private int bitField0_; // optional uint32 length = 1; public static final int LENGTH_FIELD_NUMBER = 1; private int length_; + /** + * optional uint32 length = 1; + * + *
+     * Length of the following cell block.  Could calculate it, but it is convenient to have it to hand.
+     * 
+ */ public boolean hasLength() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional uint32 length = 1; + * + *
+     * Length of the following cell block.  Could calculate it, but it is convenient to have it to hand.
+     * 
+ */ public int getLength() { return length_; } - + private void initFields() { length_ = 0; } @@ -1358,11 +2030,11 @@ public final class RPCProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -1371,12 +2043,12 @@ public final class RPCProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -1386,14 +2058,14 @@ public final class RPCProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -1403,7 +2075,7 @@ public final class RPCProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta) obj; - + boolean result = true; result = result && (hasLength() == other.hasLength()); if (hasLength()) { @@ -1414,9 +2086,13 @@ public final class RPCProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasLength()) { @@ -1424,89 +2100,83 @@ public final class RPCProtos { hash = (53 * hash) + getLength(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return 
PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code CellBlockMeta} + * + *
+     * Optional cell block message.  Included in the client RequestHeader.
+     * 
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder { @@ -1514,18 +2184,21 @@ public final class RPCProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_CellBlockMeta_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_CellBlockMeta_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_CellBlockMeta_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -1536,27 +2209,27 @@ public final class RPCProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); length_ = 0; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_CellBlockMeta_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta build() { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta result = buildPartial(); if (!result.isInitialized()) { @@ -1564,17 +2237,7 @@ public final class RPCProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta buildPartial() { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta(this); int from_bitField0_ = bitField0_; @@ -1587,7 +2250,7 @@ public final class RPCProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta)other); @@ -1596,7 +2259,7 @@ public final class RPCProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta other) { if (other == 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance()) return this; if (other.hasLength()) { @@ -1605,245 +2268,534 @@ public final class RPCProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - length_ = input.readUInt32(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // optional uint32 length = 1; private int length_ ; + /** + * optional uint32 length = 1; + * + *
+       * Length of the following cell block.  Could calculate it, but it is convenient to have it to hand.
+       * 
+ */ public boolean hasLength() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional uint32 length = 1; + * + *
+       * Length of the following cell block.  Could calculate it, but it is convenient to have it to hand.
+       * 
+ */ public int getLength() { return length_; } + /** + * optional uint32 length = 1; + * + *
+       * Length of the following cell block.  Could calculate it, but it is convenient to have it to hand.
+       * 
+ */ public Builder setLength(int value) { bitField0_ |= 0x00000001; length_ = value; onChanged(); return this; } + /** + * optional uint32 length = 1; + * + *
+       * Length of the following cell block.  Could calculate it, but it is convenient to have it to hand.
+       * 
+ */ public Builder clearLength() { bitField0_ = (bitField0_ & ~0x00000001); length_ = 0; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:CellBlockMeta) } - + static { defaultInstance = new CellBlockMeta(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:CellBlockMeta) } - + public interface ExceptionResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // optional string exceptionClassName = 1; + /** + * optional string exceptionClassName = 1; + * + *
+     * Class name of the exception thrown from the server
+     * 
+ */ boolean hasExceptionClassName(); - String getExceptionClassName(); - + /** + * optional string exceptionClassName = 1; + * + *
+     * Class name of the exception thrown from the server
+     * 
+ */ + java.lang.String getExceptionClassName(); + /** + * optional string exceptionClassName = 1; + * + *
+     * Class name of the exception thrown from the server
+     * 
+ */ + com.google.protobuf.ByteString + getExceptionClassNameBytes(); + // optional string stackTrace = 2; + /** + * optional string stackTrace = 2; + * + *
+     * Exception stack trace from the server side
+     * 
+ */ boolean hasStackTrace(); - String getStackTrace(); - + /** + * optional string stackTrace = 2; + * + *
+     * Exception stack trace from the server side
+     * 
+ */ + java.lang.String getStackTrace(); + /** + * optional string stackTrace = 2; + * + *
+     * Exception stack trace from the server side
+     * 
+ */ + com.google.protobuf.ByteString + getStackTraceBytes(); + // optional string hostname = 3; + /** + * optional string hostname = 3; + * + *
+     * Optional hostname.  Filled in for some exceptions, such as region moved,
+     * where the exception gives a clue as to where the region may have moved.
+     * 
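
On the client, these two optional fields let a stale location cache be corrected without a fresh lookup; a speculative sketch (resp is an assumed ExceptionResponse):

    // Illustrative only: use the relocation hint carried by some exceptions.
    if (resp.hasHostname() && resp.hasPort()) {
      String movedTo = resp.getHostname() + ":" + resp.getPort();
      // e.g. repoint the cached region location at movedTo
    }
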
+ */ boolean hasHostname(); - String getHostname(); - + /** + * optional string hostname = 3; + * + *
+     * Optional hostname.  Filled in for some exceptions, such as region moved,
+     * where the exception gives a clue as to where the region may have moved.
+     * 
+ */ + java.lang.String getHostname(); + /** + * optional string hostname = 3; + * + *
+     * Optional hostname.  Filled in for some exceptions, such as region moved,
+     * where the exception gives a clue as to where the region may have moved.
+     * 
+ */ + com.google.protobuf.ByteString + getHostnameBytes(); + // optional int32 port = 4; + /** + * optional int32 port = 4; + */ boolean hasPort(); + /** + * optional int32 port = 4; + */ int getPort(); - + // optional bool doNotRetry = 5; + /** + * optional bool doNotRetry = 5; + * + *
+     * Set if we are NOT to retry on receipt of this exception
+     * 
+ */ boolean hasDoNotRetry(); + /** + * optional bool doNotRetry = 5; + * + *
+     * Set if we are NOT to retry on receipt of this exception
+     * 
+ */ boolean getDoNotRetry(); } + /** + * Protobuf type {@code ExceptionResponse} + * + *
+   * At the RPC layer, this message is used to carry
+   * the server-side exception to the RPC client.
+   * 
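
A hedged sketch of how a server handler might fill this in when a call throws (t is the caught Throwable); stringifyException is Hadoop's stock helper, and the DoNotRetryIOException test stands in for whatever classification the server actually applies:

    // Illustrative only: marshal a server-side Throwable for the client.
    RPCProtos.ExceptionResponse resp = RPCProtos.ExceptionResponse.newBuilder()
        .setExceptionClassName(t.getClass().getName())
        .setStackTrace(org.apache.hadoop.util.StringUtils.stringifyException(t))
        .setDoNotRetry(t instanceof org.apache.hadoop.hbase.DoNotRetryIOException)
        .build();
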
+ */ public static final class ExceptionResponse extends com.google.protobuf.GeneratedMessage implements ExceptionResponseOrBuilder { // Use ExceptionResponse.newBuilder() to construct. - private ExceptionResponse(Builder builder) { + private ExceptionResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private ExceptionResponse(boolean noInit) {} - + private ExceptionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final ExceptionResponse defaultInstance; public static ExceptionResponse getDefaultInstance() { return defaultInstance; } - + public ExceptionResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ExceptionResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + exceptionClassName_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + stackTrace_ = input.readBytes(); + break; + } + case 26: { + bitField0_ |= 0x00000004; + hostname_ = input.readBytes(); + break; + } + case 32: { + bitField0_ |= 0x00000008; + port_ = input.readInt32(); + break; + } + case 40: { + bitField0_ |= 0x00000010; + doNotRetry_ = input.readBool(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_ExceptionResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_ExceptionResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_ExceptionResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public ExceptionResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ExceptionResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public 
com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // optional string exceptionClassName = 1; public static final int EXCEPTIONCLASSNAME_FIELD_NUMBER = 1; private java.lang.Object exceptionClassName_; + /** + * optional string exceptionClassName = 1; + * + *
+     * Class name of the exception thrown from the server
+     * 
+ */ public boolean hasExceptionClassName() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getExceptionClassName() { + /** + * optional string exceptionClassName = 1; + * + *
+     * Class name of the exception thrown from the server
+     * 
+ */ + public java.lang.String getExceptionClassName() { java.lang.Object ref = exceptionClassName_; - if (ref instanceof String) { - return (String) ref; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { exceptionClassName_ = s; } return s; } } - private com.google.protobuf.ByteString getExceptionClassNameBytes() { + /** + * optional string exceptionClassName = 1; + * + *
+     * Class name of the exception thrown from the server
+     * 
+ */ + public com.google.protobuf.ByteString + getExceptionClassNameBytes() { java.lang.Object ref = exceptionClassName_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); exceptionClassName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + // optional string stackTrace = 2; public static final int STACKTRACE_FIELD_NUMBER = 2; private java.lang.Object stackTrace_; + /** + * optional string stackTrace = 2; + * + *
+     * Exception stack trace from the server side
+     * 
+ */ public boolean hasStackTrace() { return ((bitField0_ & 0x00000002) == 0x00000002); } - public String getStackTrace() { + /** + * optional string stackTrace = 2; + * + *
+     * Exception stack trace from the server side
+     * 
+ */ + public java.lang.String getStackTrace() { java.lang.Object ref = stackTrace_; - if (ref instanceof String) { - return (String) ref; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { stackTrace_ = s; } return s; } } - private com.google.protobuf.ByteString getStackTraceBytes() { + /** + * optional string stackTrace = 2; + * + *
+     * Exception stack trace from the server side
+     * 
+ */ + public com.google.protobuf.ByteString + getStackTraceBytes() { java.lang.Object ref = stackTrace_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); stackTrace_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + // optional string hostname = 3; public static final int HOSTNAME_FIELD_NUMBER = 3; private java.lang.Object hostname_; + /** + * optional string hostname = 3; + * + *
+     * Optional hostname.  Filled in for some exceptions, such as region moved,
+     * where the exception gives a clue as to where the region may have moved.
+     * 
+ */ public boolean hasHostname() { return ((bitField0_ & 0x00000004) == 0x00000004); } - public String getHostname() { + /** + * optional string hostname = 3; + * + *
+     * Optional hostname.  Filled in for some exceptions, such as region moved,
+     * where the exception gives a clue as to where the region may have moved.
+     * 
+ */ + public java.lang.String getHostname() { java.lang.Object ref = hostname_; - if (ref instanceof String) { - return (String) ref; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { hostname_ = s; } return s; } } - private com.google.protobuf.ByteString getHostnameBytes() { + /** + * optional string hostname = 3; + * + *
+     * Optional hostname.  Filled in for some exceptions, such as region moved,
+     * where the exception gives a clue as to where the region may have moved.
+     * 
+ */ + public com.google.protobuf.ByteString + getHostnameBytes() { java.lang.Object ref = hostname_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); hostname_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + // optional int32 port = 4; public static final int PORT_FIELD_NUMBER = 4; private int port_; + /** + * optional int32 port = 4; + */ public boolean hasPort() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * optional int32 port = 4; + */ public int getPort() { return port_; } - + // optional bool doNotRetry = 5; public static final int DONOTRETRY_FIELD_NUMBER = 5; private boolean doNotRetry_; + /** + * optional bool doNotRetry = 5; + * + *
+     * Set if we are NOT to retry on receipt of this exception
+     * 
+ */ public boolean hasDoNotRetry() { return ((bitField0_ & 0x00000010) == 0x00000010); } + /** + * optional bool doNotRetry = 5; + * + *
+     * Set if we are NOT to retry on receipt of this exception
+     * 
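
A speculative sketch of the retry decision this flag drives on the client (resp is an assumed ExceptionResponse):

    // Illustrative only: honor the server's no-retry hint.
    if (resp.getDoNotRetry()) {
      throw new java.io.IOException("Server says do not retry: "
          + resp.getExceptionClassName());
    }
    // otherwise fall through to the usual retry/backoff path
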
+ */ public boolean getDoNotRetry() { return doNotRetry_; } - + private void initFields() { exceptionClassName_ = ""; stackTrace_ = ""; @@ -1855,11 +2807,11 @@ public final class RPCProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -1880,12 +2832,12 @@ public final class RPCProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -1911,14 +2863,14 @@ public final class RPCProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -1928,7 +2880,7 @@ public final class RPCProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse) obj; - + boolean result = true; result = result && (hasExceptionClassName() == other.hasExceptionClassName()); if (hasExceptionClassName()) { @@ -1959,9 +2911,13 @@ public final class RPCProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasExceptionClassName()) { @@ -1985,89 +2941,84 @@ public final class RPCProtos { hash = (53 * hash) + hashBoolean(getDoNotRetry()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse 
parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code ExceptionResponse} + * + *
+     * At the RPC layer, this message is used to carry
+     * the server-side exception to the RPC client.
+     * 
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponseOrBuilder { @@ -2075,18 +3026,21 @@ public final class RPCProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_ExceptionResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_ExceptionResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_ExceptionResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -2097,7 +3051,7 @@ public final class RPCProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); exceptionClassName_ = ""; @@ -2112,20 +3066,20 @@ public final class RPCProtos { bitField0_ = (bitField0_ & ~0x00000010); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_ExceptionResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse build() { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse result = buildPartial(); if (!result.isInitialized()) { @@ -2133,17 +3087,7 @@ public final class RPCProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse(this); int from_bitField0_ = bitField0_; @@ -2172,7 +3116,7 @@ public final class RPCProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse)other); @@ -2181,17 +3125,23 @@ public final class RPCProtos { return this; } } - + public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.getDefaultInstance()) return this; if (other.hasExceptionClassName()) { - setExceptionClassName(other.getExceptionClassName()); + bitField0_ |= 0x00000001; + exceptionClassName_ = other.exceptionClassName_; + onChanged(); } if (other.hasStackTrace()) { - setStackTrace(other.getStackTrace()); + bitField0_ |= 0x00000002; + stackTrace_ = other.stackTrace_; + onChanged(); } if (other.hasHostname()) { - setHostname(other.getHostname()); + bitField0_ |= 0x00000004; + hostname_ = other.hostname_; + onChanged(); } if (other.hasPort()) { setPort(other.getPort()); @@ -2202,81 +3152,89 @@ public final class RPCProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - exceptionClassName_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - stackTrace_ = input.readBytes(); - break; - } - case 26: { - bitField0_ |= 0x00000004; - hostname_ = input.readBytes(); - break; - } - case 32: { - bitField0_ |= 0x00000008; - port_ = input.readInt32(); - break; - } - case 40: { - bitField0_ |= 0x00000010; - doNotRetry_ = input.readBool(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // optional string exceptionClassName = 1; private java.lang.Object exceptionClassName_ = ""; + /** + * optional string exceptionClassName = 1; + * + *
+       * Class name of the exception thrown from the server
+       * 
+ */ public boolean hasExceptionClassName() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getExceptionClassName() { + /** + * optional string exceptionClassName = 1; + * + *
+       * Class name of the exception thrown from the server
+       * 
+ */ + public java.lang.String getExceptionClassName() { java.lang.Object ref = exceptionClassName_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); exceptionClassName_ = s; return s; } else { - return (String) ref; - } - } - public Builder setExceptionClassName(String value) { + return (java.lang.String) ref; + } + } + /** + * optional string exceptionClassName = 1; + * + *
+       * Class name of the exception thrown from the server
+       * 
+ */ + public com.google.protobuf.ByteString + getExceptionClassNameBytes() { + java.lang.Object ref = exceptionClassName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + exceptionClassName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string exceptionClassName = 1; + * + *
+       * Class name of the exception thrown from the server
+       * 
+ */ + public Builder setExceptionClassName( + java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ -2285,34 +3243,96 @@ public final class RPCProtos { onChanged(); return this; } + /** + * optional string exceptionClassName = 1; + * + *
+       * Class name of the exception thrown from the server
+       * 
+ */ public Builder clearExceptionClassName() { bitField0_ = (bitField0_ & ~0x00000001); exceptionClassName_ = getDefaultInstance().getExceptionClassName(); onChanged(); return this; } - void setExceptionClassName(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; + /** + * optional string exceptionClassName = 1; + * + *
+       * Class name of the exception thrown from the server
+       * 
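Note on the surrounding hunks: each string field now exposes getXxxBytes()/setXxxBytes() alongside the String accessors. The generated field holds a single Object that is either a String or a ByteString and is swapped to whichever form was requested last, so repeated calls are cheap; in the message class (further down) the decoded form is only cached when bs.isValidUtf8() holds. A standalone sketch of the idiom (names are illustrative, not from this file):

    import com.google.protobuf.ByteString;

    final class LazyUtf8Field {
      // Either a java.lang.String or a ByteString, never both.
      private Object ref = ByteString.copyFromUtf8("example");

      String get() {
        if (ref instanceof String) {
          return (String) ref;
        }
        String s = ((ByteString) ref).toStringUtf8();
        ref = s;   // cache the decoded form for later calls
        return s;
      }

      ByteString getBytes() {
        if (ref instanceof ByteString) {
          return (ByteString) ref;
        }
        ByteString b = ByteString.copyFromUtf8((String) ref);
        ref = b;   // cache the encoded form for later calls
        return b;
      }
    }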
+ */ + public Builder setExceptionClassNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; exceptionClassName_ = value; onChanged(); + return this; } - + // optional string stackTrace = 2; private java.lang.Object stackTrace_ = ""; + /** + * optional string stackTrace = 2; + * + *
+       * Exception stack trace from the server side
+       * 
+ */ public boolean hasStackTrace() { return ((bitField0_ & 0x00000002) == 0x00000002); } - public String getStackTrace() { + /** + * optional string stackTrace = 2; + * + *
+       * Exception stack trace from the server side
+       * 
+ */ + public java.lang.String getStackTrace() { java.lang.Object ref = stackTrace_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); stackTrace_ = s; return s; } else { - return (String) ref; - } - } - public Builder setStackTrace(String value) { + return (java.lang.String) ref; + } + } + /** + * optional string stackTrace = 2; + * + *
+       * Exception stack trace from the server side
+       * 
+ */ + public com.google.protobuf.ByteString + getStackTraceBytes() { + java.lang.Object ref = stackTrace_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + stackTrace_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string stackTrace = 2; + * + *
+       * Exception stack trace from the server side
+       * 
+ */ + public Builder setStackTrace( + java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ -2321,34 +3341,100 @@ public final class RPCProtos { onChanged(); return this; } + /** + * optional string stackTrace = 2; + * + *
+       * Exception stack trace from the server side
+       * 
+ */ public Builder clearStackTrace() { bitField0_ = (bitField0_ & ~0x00000002); stackTrace_ = getDefaultInstance().getStackTrace(); onChanged(); return this; } - void setStackTrace(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000002; + /** + * optional string stackTrace = 2; + * + *
+       * Exception stack trace from the server side
+       * 
+ */ + public Builder setStackTraceBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; stackTrace_ = value; onChanged(); + return this; } - + // optional string hostname = 3; private java.lang.Object hostname_ = ""; + /** + * optional string hostname = 3; + * + *
+       * Optional hostname.  Filled in for some exceptions, such as region moved,
+       * where the exception gives a clue as to where the region may have moved.
+       * 
+ */ public boolean hasHostname() { return ((bitField0_ & 0x00000004) == 0x00000004); } - public String getHostname() { + /** + * optional string hostname = 3; + * + *
+       * Optional hostname.  Filled in for some exceptions, such as region moved,
+       * where the exception gives a clue as to where the region may have moved.
+       * 
+ */ + public java.lang.String getHostname() { java.lang.Object ref = hostname_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); hostname_ = s; return s; } else { - return (String) ref; - } - } - public Builder setHostname(String value) { + return (java.lang.String) ref; + } + } + /** + * optional string hostname = 3; + * + *
+       * Optional hostname.  Filled in for some exceptions, such as region moved,
+       * where the exception gives a clue as to where the region may have moved.
+       * 
+ */ + public com.google.protobuf.ByteString + getHostnameBytes() { + java.lang.Object ref = hostname_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + hostname_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string hostname = 3; + * + *
+       * Optional hostname.  Filled in for some exceptions, such as region moved,
+       * where the exception gives a clue as to where the region may have moved.
+       * 
+ */ + public Builder setHostname( + java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ -2357,203 +3443,509 @@ public final class RPCProtos { onChanged(); return this; } + /** + * optional string hostname = 3; + * + *
+       * Optional hostname.  Filled in for some exceptions, such as region moved,
+       * where the exception gives a clue as to where the region may have moved.
+       * 
+ */ public Builder clearHostname() { bitField0_ = (bitField0_ & ~0x00000004); hostname_ = getDefaultInstance().getHostname(); onChanged(); return this; } - void setHostname(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000004; + /** + * optional string hostname = 3; + * + *
+       * Optional hostname.  Filled in for some exceptions, such as region moved,
+       * where the exception gives a clue as to where the region may have moved.
+       * 
+ */ + public Builder setHostnameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; hostname_ = value; onChanged(); + return this; } - + // optional int32 port = 4; private int port_ ; + /** + * optional int32 port = 4; + */ public boolean hasPort() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * optional int32 port = 4; + */ public int getPort() { return port_; } + /** + * optional int32 port = 4; + */ public Builder setPort(int value) { bitField0_ |= 0x00000008; port_ = value; onChanged(); return this; } + /** + * optional int32 port = 4; + */ public Builder clearPort() { bitField0_ = (bitField0_ & ~0x00000008); port_ = 0; onChanged(); return this; } - + // optional bool doNotRetry = 5; private boolean doNotRetry_ ; + /** + * optional bool doNotRetry = 5; + * + *
+       * Set if we are NOT to retry on receipt of this exception
+       * 
+ */ public boolean hasDoNotRetry() { return ((bitField0_ & 0x00000010) == 0x00000010); } + /** + * optional bool doNotRetry = 5; + * + *
+       * Set if we are NOT to retry on receipt of this exception
+       * 
+ */ public boolean getDoNotRetry() { return doNotRetry_; } + /** + * optional bool doNotRetry = 5; + * + *
+       * Set if we are NOT to retry on receipt of this exception
+       * 
+ */ public Builder setDoNotRetry(boolean value) { bitField0_ |= 0x00000010; doNotRetry_ = value; onChanged(); return this; } + /** + * optional bool doNotRetry = 5; + * + *
+       * Set if we are NOT to retry on receipt of this exception
+       * 
+ */ public Builder clearDoNotRetry() { bitField0_ = (bitField0_ & ~0x00000010); doNotRetry_ = false; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:ExceptionResponse) } - + static { defaultInstance = new ExceptionResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:ExceptionResponse) } - + public interface RequestHeaderOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // optional uint32 callId = 1; + /** + * optional uint32 callId = 1; + * + *
+     * Monotonically increasing callId to keep track of RPC requests and their responses
+     * 
+ */ boolean hasCallId(); + /** + * optional uint32 callId = 1; + * + *
+     * Monotonically increasing callId to keep track of RPC requests and their responses
+     * 
+ */ int getCallId(); - + // optional .RPCTInfo traceInfo = 2; + /** + * optional .RPCTInfo traceInfo = 2; + */ boolean hasTraceInfo(); + /** + * optional .RPCTInfo traceInfo = 2; + */ org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo getTraceInfo(); + /** + * optional .RPCTInfo traceInfo = 2; + */ org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder getTraceInfoOrBuilder(); - + // optional string methodName = 3; + /** + * optional string methodName = 3; + */ boolean hasMethodName(); - String getMethodName(); - + /** + * optional string methodName = 3; + */ + java.lang.String getMethodName(); + /** + * optional string methodName = 3; + */ + com.google.protobuf.ByteString + getMethodNameBytes(); + // optional bool requestParam = 4; + /** + * optional bool requestParam = 4; + * + *
+     * If true, then a pb Message param follows.
+     * 
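Note: requestParam and cellBlockMeta describe what follows the header on the wire. A hedged sketch of the framing a sender might produce (writeCall and cellBlock are illustrative only; the real client adds its own connection-level framing on top):

    import java.io.IOException;
    import java.io.OutputStream;
    import com.google.protobuf.Message;
    import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader;

    final class RequestFramingSketch {
      static void writeCall(OutputStream out, RequestHeader header, Message param,
          byte[] cellBlock) throws IOException {
        header.writeDelimitedTo(out);                // varint length + header
        if (header.getRequestParam() && param != null) {
          param.writeDelimitedTo(out);               // varint length + request pb
        }
        if (header.hasCellBlockMeta() && cellBlock != null) {
          out.write(cellBlock);                      // raw encoded cells follow
        }
      }
    }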
+ */ boolean hasRequestParam(); + /** + * optional bool requestParam = 4; + * + *
+     * If true, then a pb Message param follows.
+     * 
+ */ boolean getRequestParam(); - + // optional .CellBlockMeta cellBlockMeta = 5; + /** + * optional .CellBlockMeta cellBlockMeta = 5; + * + *
+     * If present, then an encoded data block follows.
+     * 
+ */ boolean hasCellBlockMeta(); + /** + * optional .CellBlockMeta cellBlockMeta = 5; + * + *
+     * If present, then an encoded data block follows.
+     * 
+ */ org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta getCellBlockMeta(); + /** + * optional .CellBlockMeta cellBlockMeta = 5; + * + *
+     * If present, then an encoded data block follows.
+     * 
+ */ org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder getCellBlockMetaOrBuilder(); } + /** + * Protobuf type {@code RequestHeader} + * + *
+   * Header sent making a request.
+   * 
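Note on the class body that follows: protobuf 2.5 moves parsing into a private RequestHeader(CodedInputStream, ExtensionRegistryLite) constructor that reads fields inline and captures unknown fields, replacing the old per-Builder loop. The case labels in its switch are protobuf wire tags, computed as (fieldNumber << 3) | wireType; a sketch of where the values come from:

    final class WireTagSketch {
      // wireType 0 = varint, 2 = length-delimited.
      static int tag(int fieldNumber, int wireType) {
        return (fieldNumber << 3) | wireType;
      }
      // tag(1, 0) ==  8  -> case  8: callId (uint32)
      // tag(2, 2) == 18  -> case 18: traceInfo (embedded message)
      // tag(3, 2) == 26  -> case 26: methodName (string)
      // tag(4, 0) == 32  -> case 32: requestParam (bool)
      // tag(5, 2) == 42  -> case 42: cellBlockMeta (embedded message)
    }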
+ */ public static final class RequestHeader extends com.google.protobuf.GeneratedMessage implements RequestHeaderOrBuilder { // Use RequestHeader.newBuilder() to construct. - private RequestHeader(Builder builder) { + private RequestHeader(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private RequestHeader(boolean noInit) {} - + private RequestHeader(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final RequestHeader defaultInstance; public static RequestHeader getDefaultInstance() { return defaultInstance; } - + public RequestHeader getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private RequestHeader( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + callId_ = input.readUInt32(); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder subBuilder = null; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + subBuilder = traceInfo_.toBuilder(); + } + traceInfo_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(traceInfo_); + traceInfo_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000002; + break; + } + case 26: { + bitField0_ |= 0x00000004; + methodName_ = input.readBytes(); + break; + } + case 32: { + bitField0_ |= 0x00000008; + requestParam_ = input.readBool(); + break; + } + case 42: { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.Builder subBuilder = null; + if (((bitField0_ & 0x00000010) == 0x00000010)) { + subBuilder = cellBlockMeta_.toBuilder(); + } + cellBlockMeta_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(cellBlockMeta_); + cellBlockMeta_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000010; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RequestHeader_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RequestHeader_fieldAccessorTable; + return 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RequestHeader_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public RequestHeader parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RequestHeader(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // optional uint32 callId = 1; public static final int CALLID_FIELD_NUMBER = 1; private int callId_; + /** + * optional uint32 callId = 1; + * + *
+     * Monotonically increasing callId to keep track of RPC requests and their responses
+     * 
+ */ public boolean hasCallId() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional uint32 callId = 1; + * + *
+     * Monotonically increasing callId to keep track of RPC requests and their responses
+     * 
+ */ public int getCallId() { return callId_; } - + // optional .RPCTInfo traceInfo = 2; public static final int TRACEINFO_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo traceInfo_; + /** + * optional .RPCTInfo traceInfo = 2; + */ public boolean hasTraceInfo() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional .RPCTInfo traceInfo = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo getTraceInfo() { return traceInfo_; } + /** + * optional .RPCTInfo traceInfo = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder getTraceInfoOrBuilder() { return traceInfo_; } - + // optional string methodName = 3; public static final int METHODNAME_FIELD_NUMBER = 3; private java.lang.Object methodName_; + /** + * optional string methodName = 3; + */ public boolean hasMethodName() { return ((bitField0_ & 0x00000004) == 0x00000004); } - public String getMethodName() { + /** + * optional string methodName = 3; + */ + public java.lang.String getMethodName() { java.lang.Object ref = methodName_; - if (ref instanceof String) { - return (String) ref; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { methodName_ = s; } return s; } } - private com.google.protobuf.ByteString getMethodNameBytes() { + /** + * optional string methodName = 3; + */ + public com.google.protobuf.ByteString + getMethodNameBytes() { java.lang.Object ref = methodName_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); methodName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + // optional bool requestParam = 4; public static final int REQUESTPARAM_FIELD_NUMBER = 4; private boolean requestParam_; + /** + * optional bool requestParam = 4; + * + *
+     * If true, then a pb Message param follows.
+     * 
+ */ public boolean hasRequestParam() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * optional bool requestParam = 4; + * + *
+     * If true, then a pb Message param follows.
+     * 
+ */ public boolean getRequestParam() { return requestParam_; } - + // optional .CellBlockMeta cellBlockMeta = 5; public static final int CELLBLOCKMETA_FIELD_NUMBER = 5; private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta cellBlockMeta_; + /** + * optional .CellBlockMeta cellBlockMeta = 5; + * + *
+     * If present, then an encoded data block follows.
+     * 
+ */ public boolean hasCellBlockMeta() { return ((bitField0_ & 0x00000010) == 0x00000010); } + /** + * optional .CellBlockMeta cellBlockMeta = 5; + * + *
+     * If present, then an encoded data block follows.
+     * 
+ */ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta getCellBlockMeta() { return cellBlockMeta_; } + /** + * optional .CellBlockMeta cellBlockMeta = 5; + * + *
+     * If present, then an encoded data block follows.
+     * 
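Note on the hunk that follows: hashCode() gains a memoizedHashCode cache, mirroring the existing memoizedSerializedSize. This is safe because generated messages are immutable; the worst case under concurrency is computing the same value twice. It pays off when messages serve as map keys, e.g.:

    import java.util.HashMap;
    import java.util.Map;
    import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader;

    final class HashMemoSketch {
      public static void main(String[] args) {
        Map<RequestHeader, String> inFlight = new HashMap<RequestHeader, String>();
        RequestHeader key = RequestHeader.newBuilder().setCallId(42).build();
        inFlight.put(key, "scan-request");
        // The put above walked every set field to hash the key; this lookup
        // reuses the memoized value instead of recomputing it.
        System.out.println(inFlight.get(key));
      }
    }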
+ */ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder getCellBlockMetaOrBuilder() { return cellBlockMeta_; } - + private void initFields() { callId_ = 0; traceInfo_ = org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDefaultInstance(); @@ -2565,11 +3957,11 @@ public final class RPCProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -2590,12 +3982,12 @@ public final class RPCProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -2621,14 +4013,14 @@ public final class RPCProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -2638,7 +4030,7 @@ public final class RPCProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader) obj; - + boolean result = true; result = result && (hasCallId() == other.hasCallId()); if (hasCallId()) { @@ -2669,9 +4061,13 @@ public final class RPCProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasCallId()) { @@ -2695,89 +4091,83 @@ public final class RPCProtos { hash = (53 * hash) + getCellBlockMeta().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code RequestHeader} + * + *
+     * Header sent making a request.
+     * 
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeaderOrBuilder { @@ -2785,18 +4175,21 @@ public final class RPCProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RequestHeader_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RequestHeader_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RequestHeader_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -2809,7 +4202,7 @@ public final class RPCProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); callId_ = 0; @@ -2832,20 +4225,20 @@ public final class RPCProtos { bitField0_ = (bitField0_ & ~0x00000010); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RequestHeader_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader build() { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader result = buildPartial(); if (!result.isInitialized()) { @@ -2853,17 +4246,7 @@ public final class RPCProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader buildPartial() { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader(this); int from_bitField0_ = bitField0_; @@ -2900,7 +4283,7 @@ public final class RPCProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader)other); @@ -2909,7 +4292,7 @@ public final class RPCProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader other) { if (other == 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader.getDefaultInstance()) return this; if (other.hasCallId()) { @@ -2919,7 +4302,9 @@ public final class RPCProtos { mergeTraceInfo(other.getTraceInfo()); } if (other.hasMethodName()) { - setMethodName(other.getMethodName()); + bitField0_ |= 0x00000004; + methodName_ = other.methodName_; + onChanged(); } if (other.hasRequestParam()) { setRequestParam(other.getRequestParam()); @@ -2930,101 +4315,92 @@ public final class RPCProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - callId_ = input.readUInt32(); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.newBuilder(); - if (hasTraceInfo()) { - subBuilder.mergeFrom(getTraceInfo()); - } - input.readMessage(subBuilder, extensionRegistry); - setTraceInfo(subBuilder.buildPartial()); - break; - } - case 26: { - bitField0_ |= 0x00000004; - methodName_ = input.readBytes(); - break; - } - case 32: { - bitField0_ |= 0x00000008; - requestParam_ = input.readBool(); - break; - } - case 42: { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.newBuilder(); - if (hasCellBlockMeta()) { - subBuilder.mergeFrom(getCellBlockMeta()); - } - input.readMessage(subBuilder, extensionRegistry); - setCellBlockMeta(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // optional uint32 callId = 1; private int callId_ ; + /** + * optional uint32 callId = 1; + * + *
+       * Monotonically increasing callId to keep track of RPC requests and their responses
+       * 
+ */ public boolean hasCallId() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional uint32 callId = 1; + * + *
+       * Monotonically increasing callId to keep track of RPC requests and their responses
+       * 
+ */ public int getCallId() { return callId_; } + /** + * optional uint32 callId = 1; + * + *
+       * Monotonically increasing callId to keep track of RPC requests and their responses
+       * 
+ */ public Builder setCallId(int value) { bitField0_ |= 0x00000001; callId_ = value; onChanged(); return this; } + /** + * optional uint32 callId = 1; + * + *
+       * Monotonically increasing callId to keep track of RPC requests and their responses
+       * 
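Note on the regenerated mergeFrom(RequestHeader other) above: string fields are now copied as their raw cached object (methodName_ = other.methodName_) rather than through the public setter, avoiding an eager UTF-8 decode. The merge still copies only fields that are set on other, so later merges overlay earlier ones; a small sketch:

    import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader;

    final class MergeSketch {
      static RequestHeader overlay() {
        RequestHeader base = RequestHeader.newBuilder()
            .setCallId(7)
            .setMethodName("Get")
            .build();
        RequestHeader patch = RequestHeader.newBuilder()
            .setRequestParam(true)
            .build();
        return RequestHeader.newBuilder()
            .mergeFrom(base)
            .mergeFrom(patch)   // only requestParam is set on patch
            .build();           // callId=7, methodName="Get", requestParam=true
      }
    }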
+ */ public Builder clearCallId() { bitField0_ = (bitField0_ & ~0x00000001); callId_ = 0; onChanged(); return this; } - + // optional .RPCTInfo traceInfo = 2; private org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo traceInfo_ = org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder> traceInfoBuilder_; + /** + * optional .RPCTInfo traceInfo = 2; + */ public boolean hasTraceInfo() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional .RPCTInfo traceInfo = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo getTraceInfo() { if (traceInfoBuilder_ == null) { return traceInfo_; @@ -3032,6 +4408,9 @@ public final class RPCProtos { return traceInfoBuilder_.getMessage(); } } + /** + * optional .RPCTInfo traceInfo = 2; + */ public Builder setTraceInfo(org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo value) { if (traceInfoBuilder_ == null) { if (value == null) { @@ -3045,6 +4424,9 @@ public final class RPCProtos { bitField0_ |= 0x00000002; return this; } + /** + * optional .RPCTInfo traceInfo = 2; + */ public Builder setTraceInfo( org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder builderForValue) { if (traceInfoBuilder_ == null) { @@ -3056,6 +4438,9 @@ public final class RPCProtos { bitField0_ |= 0x00000002; return this; } + /** + * optional .RPCTInfo traceInfo = 2; + */ public Builder mergeTraceInfo(org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo value) { if (traceInfoBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && @@ -3072,6 +4457,9 @@ public final class RPCProtos { bitField0_ |= 0x00000002; return this; } + /** + * optional .RPCTInfo traceInfo = 2; + */ public Builder clearTraceInfo() { if (traceInfoBuilder_ == null) { traceInfo_ = org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDefaultInstance(); @@ -3082,11 +4470,17 @@ public final class RPCProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } + /** + * optional .RPCTInfo traceInfo = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder getTraceInfoBuilder() { bitField0_ |= 0x00000002; onChanged(); return getTraceInfoFieldBuilder().getBuilder(); } + /** + * optional .RPCTInfo traceInfo = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder getTraceInfoOrBuilder() { if (traceInfoBuilder_ != null) { return traceInfoBuilder_.getMessageOrBuilder(); @@ -3094,6 +4488,9 @@ public final class RPCProtos { return traceInfo_; } } + /** + * optional .RPCTInfo traceInfo = 2; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder> getTraceInfoFieldBuilder() { @@ -3107,23 +4504,50 @@ public final class RPCProtos { } return traceInfoBuilder_; } - + // optional string methodName = 3; private java.lang.Object methodName_ = ""; + /** + * optional string methodName = 3; + */ public boolean hasMethodName() { return ((bitField0_ & 0x00000004) == 0x00000004); } - public String getMethodName() { + /** + * optional string methodName = 3; + */ + public java.lang.String getMethodName() { java.lang.Object ref = 
methodName_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); methodName_ = s; return s; } else { - return (String) ref; + return (java.lang.String) ref; + } + } + /** + * optional string methodName = 3; + */ + public com.google.protobuf.ByteString + getMethodNameBytes() { + java.lang.Object ref = methodName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + methodName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; } } - public Builder setMethodName(String value) { + /** + * optional string methodName = 3; + */ + public Builder setMethodName( + java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ -3132,46 +4556,99 @@ public final class RPCProtos { onChanged(); return this; } + /** + * optional string methodName = 3; + */ public Builder clearMethodName() { bitField0_ = (bitField0_ & ~0x00000004); methodName_ = getDefaultInstance().getMethodName(); onChanged(); return this; } - void setMethodName(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000004; + /** + * optional string methodName = 3; + */ + public Builder setMethodNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; methodName_ = value; onChanged(); + return this; } - + // optional bool requestParam = 4; private boolean requestParam_ ; + /** + * optional bool requestParam = 4; + * + *
+       * If true, then a pb Message param follows.
+       * 
+ */ public boolean hasRequestParam() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * optional bool requestParam = 4; + * + *
+       * If true, then a pb Message param follows.
+       * 
+ */ public boolean getRequestParam() { return requestParam_; } + /** + * optional bool requestParam = 4; + * + *
+       * If true, then a pb Message param follows.
+       * 
+ */ public Builder setRequestParam(boolean value) { bitField0_ |= 0x00000008; requestParam_ = value; onChanged(); return this; } + /** + * optional bool requestParam = 4; + * + *
+       * If true, then a pb Message param follows.
+       * 
+ */ public Builder clearRequestParam() { bitField0_ = (bitField0_ & ~0x00000008); requestParam_ = false; onChanged(); return this; } - + // optional .CellBlockMeta cellBlockMeta = 5; private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta cellBlockMeta_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder> cellBlockMetaBuilder_; + /** + * optional .CellBlockMeta cellBlockMeta = 5; + * + *
+       * If present, then an encoded data block follows.
+       * 
+ */ public boolean hasCellBlockMeta() { return ((bitField0_ & 0x00000010) == 0x00000010); } + /** + * optional .CellBlockMeta cellBlockMeta = 5; + * + *
+       * If present, then an encoded data block follows.
+       * 
+ */ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta getCellBlockMeta() { if (cellBlockMetaBuilder_ == null) { return cellBlockMeta_; @@ -3179,6 +4656,13 @@ public final class RPCProtos { return cellBlockMetaBuilder_.getMessage(); } } + /** + * optional .CellBlockMeta cellBlockMeta = 5; + * + *
+       * If present, then an encoded data block follows.
+       * 
+ */ public Builder setCellBlockMeta(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta value) { if (cellBlockMetaBuilder_ == null) { if (value == null) { @@ -3192,6 +4676,13 @@ public final class RPCProtos { bitField0_ |= 0x00000010; return this; } + /** + * optional .CellBlockMeta cellBlockMeta = 5; + * + *
+       * If present, then an encoded data block follows.
+       * 
+ */ public Builder setCellBlockMeta( org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.Builder builderForValue) { if (cellBlockMetaBuilder_ == null) { @@ -3203,6 +4694,13 @@ public final class RPCProtos { bitField0_ |= 0x00000010; return this; } + /** + * optional .CellBlockMeta cellBlockMeta = 5; + * + *
+       * If present, then an encoded data block follows.
+       * 
+ */ public Builder mergeCellBlockMeta(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta value) { if (cellBlockMetaBuilder_ == null) { if (((bitField0_ & 0x00000010) == 0x00000010) && @@ -3219,6 +4717,13 @@ public final class RPCProtos { bitField0_ |= 0x00000010; return this; } + /** + * optional .CellBlockMeta cellBlockMeta = 5; + * + *
+       * If present, then an encoded data block follows.
+       * 
+ */ public Builder clearCellBlockMeta() { if (cellBlockMetaBuilder_ == null) { cellBlockMeta_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance(); @@ -3229,11 +4734,25 @@ public final class RPCProtos { bitField0_ = (bitField0_ & ~0x00000010); return this; } + /** + * optional .CellBlockMeta cellBlockMeta = 5; + * + *
+       * If present, then an encoded data block follows.
+       * 
+ */ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.Builder getCellBlockMetaBuilder() { bitField0_ |= 0x00000010; onChanged(); return getCellBlockMetaFieldBuilder().getBuilder(); } + /** + * optional .CellBlockMeta cellBlockMeta = 5; + * + *
+       * If present, then an encoded data block follows.
+       * 
+ */ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder getCellBlockMetaOrBuilder() { if (cellBlockMetaBuilder_ != null) { return cellBlockMetaBuilder_.getMessageOrBuilder(); @@ -3241,6 +4760,13 @@ public final class RPCProtos { return cellBlockMeta_; } } + /** + * optional .CellBlockMeta cellBlockMeta = 5; + * + *
+       * If present, then an encoded data block follows.
+       * 
+ */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder> getCellBlockMetaFieldBuilder() { @@ -3254,100 +4780,289 @@ public final class RPCProtos { } return cellBlockMetaBuilder_; } - + // @@protoc_insertion_point(builder_scope:RequestHeader) } - + static { defaultInstance = new RequestHeader(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:RequestHeader) } - + public interface ResponseHeaderOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // optional uint32 callId = 1; + /** + * optional uint32 callId = 1; + */ boolean hasCallId(); + /** + * optional uint32 callId = 1; + */ int getCallId(); - + // optional .ExceptionResponse exception = 2; + /** + * optional .ExceptionResponse exception = 2; + * + *
+     * If present, then the request threw an exception and no response message follows (else we presume one)
+     * 
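Note: the comment above states the response-side contract: when exception is present, no response message follows the header. A hedged sketch of receiver-side dispatch (dispatch is illustrative, not part of this file):

    import java.io.IOException;
    import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse;
    import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader;

    final class ResponseDispatchSketch {
      static void dispatch(ResponseHeader header) throws IOException {
        if (header.hasException()) {
          ExceptionResponse ex = header.getException();
          // No response message follows; surface the remote failure.
          // A retry policy could also consult ex.getDoNotRetry() here.
          throw new IOException(ex.getExceptionClassName() + "\n" + ex.getStackTrace());
        }
        // Otherwise a response message follows, plus an encoded cell block
        // if cellBlockMeta is set.
      }
    }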
+ */ boolean hasException(); + /** + * optional .ExceptionResponse exception = 2; + * + *
+     * If present, then the request threw an exception and no response message follows (else we presume one)
+     * 
+ */ org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse getException(); + /** + * optional .ExceptionResponse exception = 2; + * + *
+     * If present, then the request threw an exception and no response message follows (else we presume one)
+     * 
+ */ org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponseOrBuilder getExceptionOrBuilder(); - + // optional .CellBlockMeta cellBlockMeta = 3; + /** + * optional .CellBlockMeta cellBlockMeta = 3; + * + *
+     * If present, then an encoded data block follows.
+     * 
+ */ boolean hasCellBlockMeta(); + /** + * optional .CellBlockMeta cellBlockMeta = 3; + * + *
+     * If present, then an encoded data block follows.
+     * 
+ */ org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta getCellBlockMeta(); + /** + * optional .CellBlockMeta cellBlockMeta = 3; + * + *
+     * If present, then an encoded data block follows.
+     * 
+ */ org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder getCellBlockMetaOrBuilder(); } + /** + * Protobuf type {@code ResponseHeader} + */ public static final class ResponseHeader extends com.google.protobuf.GeneratedMessage implements ResponseHeaderOrBuilder { // Use ResponseHeader.newBuilder() to construct. - private ResponseHeader(Builder builder) { + private ResponseHeader(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private ResponseHeader(boolean noInit) {} - + private ResponseHeader(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final ResponseHeader defaultInstance; public static ResponseHeader getDefaultInstance() { return defaultInstance; } - + public ResponseHeader getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ResponseHeader( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + callId_ = input.readUInt32(); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.Builder subBuilder = null; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + subBuilder = exception_.toBuilder(); + } + exception_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(exception_); + exception_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000002; + break; + } + case 26: { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.Builder subBuilder = null; + if (((bitField0_ & 0x00000004) == 0x00000004)) { + subBuilder = cellBlockMeta_.toBuilder(); + } + cellBlockMeta_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(cellBlockMeta_); + cellBlockMeta_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000004; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_ResponseHeader_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_ResponseHeader_fieldAccessorTable; 
+ return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_ResponseHeader_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public ResponseHeader parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ResponseHeader(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // optional uint32 callId = 1; public static final int CALLID_FIELD_NUMBER = 1; private int callId_; + /** + * optional uint32 callId = 1; + */ public boolean hasCallId() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional uint32 callId = 1; + */ public int getCallId() { return callId_; } - + // optional .ExceptionResponse exception = 2; public static final int EXCEPTION_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse exception_; + /** + * optional .ExceptionResponse exception = 2; + * + *
+     * If present, then the request threw an exception and no response message follows (else we presume one)
+     * 
+ */ public boolean hasException() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional .ExceptionResponse exception = 2; + * + *
+     * If present, then the request threw an exception and no response message follows (else we presume one)
+     * 
+ */ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse getException() { return exception_; } + /** + * optional .ExceptionResponse exception = 2; + * + *
+     * If present, then the request threw an exception and no response message follows (else we presume one)
+     * 
+ */ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponseOrBuilder getExceptionOrBuilder() { return exception_; } - + // optional .CellBlockMeta cellBlockMeta = 3; public static final int CELLBLOCKMETA_FIELD_NUMBER = 3; private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta cellBlockMeta_; + /** + * optional .CellBlockMeta cellBlockMeta = 3; + * + *
+     * If present, then an encoded data block follows.
+     * 
+ */ public boolean hasCellBlockMeta() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional .CellBlockMeta cellBlockMeta = 3; + * + *
+     * If present, then an encoded data block follows.
+     * 
+ */ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta getCellBlockMeta() { return cellBlockMeta_; } + /** + * optional .CellBlockMeta cellBlockMeta = 3; + * + *
+     * If present, then an encoded data block follows.
+     * 
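Note on the static parse methods rewritten below: parseDelimitedFrom now goes through PARSER.parseDelimitedFrom(input), which, like the old builder-based path, returns null on a clean end-of-stream. A small round-trip sketch:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader;

    final class DelimitedSketch {
      public static void main(String[] args) throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        ResponseHeader.newBuilder().setCallId(1).build().writeDelimitedTo(out);
        ResponseHeader.newBuilder().setCallId(2).build().writeDelimitedTo(out);

        InputStream in = new ByteArrayInputStream(out.toByteArray());
        ResponseHeader h;
        while ((h = ResponseHeader.parseDelimitedFrom(in)) != null) {  // null at EOF
          System.out.println("callId=" + h.getCallId());
        }
      }
    }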
+ */ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder getCellBlockMetaOrBuilder() { return cellBlockMeta_; } - + private void initFields() { callId_ = 0; exception_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.getDefaultInstance(); @@ -3357,11 +5072,11 @@ public final class RPCProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -3376,12 +5091,12 @@ public final class RPCProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -3399,14 +5114,14 @@ public final class RPCProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -3416,7 +5131,7 @@ public final class RPCProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader) obj; - + boolean result = true; result = result && (hasCallId() == other.hasCallId()); if (hasCallId()) { @@ -3437,9 +5152,13 @@ public final class RPCProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasCallId()) { @@ -3455,89 +5174,79 @@ public final class RPCProtos { hash = (53 * hash) + getCellBlockMeta().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public 
static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code ResponseHeader} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeaderOrBuilder { @@ -3545,18 +5254,21 @@ public final class RPCProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_ResponseHeader_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_ResponseHeader_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_ResponseHeader_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader.newBuilder() private Builder() { 
maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -3569,7 +5281,7 @@ public final class RPCProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); callId_ = 0; @@ -3588,20 +5300,20 @@ public final class RPCProtos { bitField0_ = (bitField0_ & ~0x00000004); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_ResponseHeader_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader build() { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader result = buildPartial(); if (!result.isInitialized()) { @@ -3609,17 +5321,7 @@ public final class RPCProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader buildPartial() { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader(this); int from_bitField0_ = bitField0_; @@ -3648,7 +5350,7 @@ public final class RPCProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader)other); @@ -3657,7 +5359,7 @@ public final class RPCProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader other) { if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader.getDefaultInstance()) return this; if (other.hasCallId()) { @@ -3672,91 +5374,84 @@ public final class RPCProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - callId_ = 
input.readUInt32(); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.newBuilder(); - if (hasException()) { - subBuilder.mergeFrom(getException()); - } - input.readMessage(subBuilder, extensionRegistry); - setException(subBuilder.buildPartial()); - break; - } - case 26: { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.newBuilder(); - if (hasCellBlockMeta()) { - subBuilder.mergeFrom(getCellBlockMeta()); - } - input.readMessage(subBuilder, extensionRegistry); - setCellBlockMeta(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // optional uint32 callId = 1; private int callId_ ; + /** + * optional uint32 callId = 1; + */ public boolean hasCallId() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional uint32 callId = 1; + */ public int getCallId() { return callId_; } + /** + * optional uint32 callId = 1; + */ public Builder setCallId(int value) { bitField0_ |= 0x00000001; callId_ = value; onChanged(); return this; } + /** + * optional uint32 callId = 1; + */ public Builder clearCallId() { bitField0_ = (bitField0_ & ~0x00000001); callId_ = 0; onChanged(); return this; } - + // optional .ExceptionResponse exception = 2; private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse exception_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponseOrBuilder> exceptionBuilder_; + /** + * optional .ExceptionResponse exception = 2; + * + *
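
// --------------------------------------------------------------------------
// [Editor's aside -- illustrative sketch, not part of the generated patch]
// The hunk above replaces the Builder's hand-rolled tag-switch mergeFrom()
// with a delegation to PARSER.parsePartialFrom(), the protobuf 2.5 idiom this
// patch applies to every message. Caller-facing parse entry points keep their
// old signatures; a minimal usage sketch (the stream `in` is a hypothetical
// placeholder, and an import of java.io.IOException is assumed):
//
//   RPCProtos.ResponseHeader header = RPCProtos.ResponseHeader.parseDelimitedFrom(in);
//   if (header != null && header.hasException()) {   // null means clean EOF
//     throw new IOException("remote call failed: "
//         + header.getException().getExceptionClassName());
//   }
//
// PARSER.parseDelimitedFrom() preserves the old contract of returning null
// when the stream is already at end-of-file.
// --------------------------------------------------------------------------
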
+       * If present, then the request threw an exception and there is no response message (else we presume one)
+       * 
+ */ public boolean hasException() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional .ExceptionResponse exception = 2; + * + *
+       * If present, then the request threw an exception and there is no response message (else we presume one)
+       * 
+ */ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse getException() { if (exceptionBuilder_ == null) { return exception_; @@ -3764,6 +5459,13 @@ public final class RPCProtos { return exceptionBuilder_.getMessage(); } } + /** + * optional .ExceptionResponse exception = 2; + * + *
+       * If present, then the request threw an exception and there is no response message (else we presume one)
+       * 
+ */ public Builder setException(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse value) { if (exceptionBuilder_ == null) { if (value == null) { @@ -3777,6 +5479,13 @@ public final class RPCProtos { bitField0_ |= 0x00000002; return this; } + /** + * optional .ExceptionResponse exception = 2; + * + *
+       * If present, then the request threw an exception and there is no response message (else we presume one)
+       * 
+ */ public Builder setException( org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.Builder builderForValue) { if (exceptionBuilder_ == null) { @@ -3788,6 +5497,13 @@ public final class RPCProtos { bitField0_ |= 0x00000002; return this; } + /** + * optional .ExceptionResponse exception = 2; + * + *
+       * If present, then the request threw an exception and there is no response message (else we presume one)
+       * 
+ */ public Builder mergeException(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse value) { if (exceptionBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && @@ -3804,6 +5520,13 @@ public final class RPCProtos { bitField0_ |= 0x00000002; return this; } + /** + * optional .ExceptionResponse exception = 2; + * + *
+       * If present, then the request threw an exception and there is no response message (else we presume one)
+       * 
+ */ public Builder clearException() { if (exceptionBuilder_ == null) { exception_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.getDefaultInstance(); @@ -3814,11 +5537,25 @@ public final class RPCProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } + /** + * optional .ExceptionResponse exception = 2; + * + *
+       * If present, then the request threw an exception and there is no response message (else we presume one)
+       * 
+ */ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.Builder getExceptionBuilder() { bitField0_ |= 0x00000002; onChanged(); return getExceptionFieldBuilder().getBuilder(); } + /** + * optional .ExceptionResponse exception = 2; + * + *
+       * If present, then the request threw an exception and there is no response message (else we presume one)
+       * 
+ */ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponseOrBuilder getExceptionOrBuilder() { if (exceptionBuilder_ != null) { return exceptionBuilder_.getMessageOrBuilder(); @@ -3826,6 +5563,13 @@ public final class RPCProtos { return exception_; } } + /** + * optional .ExceptionResponse exception = 2; + * + *
+       * If present, then the request threw an exception and there is no response message (else we presume one)
+       * 
+ */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponseOrBuilder> getExceptionFieldBuilder() { @@ -3839,14 +5583,28 @@ public final class RPCProtos { } return exceptionBuilder_; } - + // optional .CellBlockMeta cellBlockMeta = 3; private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta cellBlockMeta_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder> cellBlockMetaBuilder_; + /** + * optional .CellBlockMeta cellBlockMeta = 3; + * + *
+       * If present, then an encoded data block follows.
+       * 
+ */ public boolean hasCellBlockMeta() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional .CellBlockMeta cellBlockMeta = 3; + * + *
+       * If present, then an encoded data block follows.
+       * 
+ */ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta getCellBlockMeta() { if (cellBlockMetaBuilder_ == null) { return cellBlockMeta_; @@ -3854,6 +5612,13 @@ public final class RPCProtos { return cellBlockMetaBuilder_.getMessage(); } } + /** + * optional .CellBlockMeta cellBlockMeta = 3; + * + *
+       * If present, then an encoded data block follows.
+       * 
+ */ public Builder setCellBlockMeta(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta value) { if (cellBlockMetaBuilder_ == null) { if (value == null) { @@ -3867,6 +5632,13 @@ public final class RPCProtos { bitField0_ |= 0x00000004; return this; } + /** + * optional .CellBlockMeta cellBlockMeta = 3; + * + *
+       * If present, then an encoded data block follows.
+       * 
+ */ public Builder setCellBlockMeta( org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.Builder builderForValue) { if (cellBlockMetaBuilder_ == null) { @@ -3878,6 +5650,13 @@ public final class RPCProtos { bitField0_ |= 0x00000004; return this; } + /** + * optional .CellBlockMeta cellBlockMeta = 3; + * + *
+       * If present, then an encoded data block follows.
+       * 
+ */ public Builder mergeCellBlockMeta(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta value) { if (cellBlockMetaBuilder_ == null) { if (((bitField0_ & 0x00000004) == 0x00000004) && @@ -3894,6 +5673,13 @@ public final class RPCProtos { bitField0_ |= 0x00000004; return this; } + /** + * optional .CellBlockMeta cellBlockMeta = 3; + * + *
+       * If present, then an encoded data block follows.
+       * 
+ */ public Builder clearCellBlockMeta() { if (cellBlockMetaBuilder_ == null) { cellBlockMeta_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance(); @@ -3904,11 +5690,25 @@ public final class RPCProtos { bitField0_ = (bitField0_ & ~0x00000004); return this; } + /** + * optional .CellBlockMeta cellBlockMeta = 3; + * + *
+       * If present, then an encoded data block follows.
+       * 
+ */ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.Builder getCellBlockMetaBuilder() { bitField0_ |= 0x00000004; onChanged(); return getCellBlockMetaFieldBuilder().getBuilder(); } + /** + * optional .CellBlockMeta cellBlockMeta = 3; + * + *
+       * If present, then an encoded data block follows.
+       * 
+ */ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder getCellBlockMetaOrBuilder() { if (cellBlockMetaBuilder_ != null) { return cellBlockMetaBuilder_.getMessageOrBuilder(); @@ -3916,6 +5716,13 @@ public final class RPCProtos { return cellBlockMeta_; } } + /** + * optional .CellBlockMeta cellBlockMeta = 3; + * + *
+       * If present, then an encoded data block follows.
+       * 
+ */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder> getCellBlockMetaFieldBuilder() { @@ -3929,18 +5736,18 @@ public final class RPCProtos { } return cellBlockMetaBuilder_; } - + // @@protoc_insertion_point(builder_scope:ResponseHeader) } - + static { defaultInstance = new ResponseHeader(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:ResponseHeader) } - + private static com.google.protobuf.Descriptors.Descriptor internal_static_UserInformation_descriptor; private static @@ -3971,7 +5778,7 @@ public final class RPCProtos { private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_ResponseHeader_fieldAccessorTable; - + public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; @@ -4012,49 +5819,37 @@ public final class RPCProtos { internal_static_UserInformation_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_UserInformation_descriptor, - new java.lang.String[] { "EffectiveUser", "RealUser", }, - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.class, - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder.class); + new java.lang.String[] { "EffectiveUser", "RealUser", }); internal_static_ConnectionHeader_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_ConnectionHeader_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ConnectionHeader_descriptor, - new java.lang.String[] { "UserInfo", "Protocol", "CellBlockCodecClass", "CellBlockCompressorClass", }, - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader.class, - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader.Builder.class); + new java.lang.String[] { "UserInfo", "Protocol", "CellBlockCodecClass", "CellBlockCompressorClass", }); internal_static_CellBlockMeta_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_CellBlockMeta_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_CellBlockMeta_descriptor, - new java.lang.String[] { "Length", }, - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.class, - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.Builder.class); + new java.lang.String[] { "Length", }); internal_static_ExceptionResponse_descriptor = getDescriptor().getMessageTypes().get(3); internal_static_ExceptionResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ExceptionResponse_descriptor, - new java.lang.String[] { "ExceptionClassName", "StackTrace", "Hostname", "Port", "DoNotRetry", }, - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.Builder.class); + new java.lang.String[] { "ExceptionClassName", "StackTrace", "Hostname", "Port", "DoNotRetry", }); internal_static_RequestHeader_descriptor = getDescriptor().getMessageTypes().get(4); internal_static_RequestHeader_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_RequestHeader_descriptor, - new java.lang.String[] { "CallId", "TraceInfo", "MethodName", "RequestParam", 
"CellBlockMeta", }, - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader.class, - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader.Builder.class); + new java.lang.String[] { "CallId", "TraceInfo", "MethodName", "RequestParam", "CellBlockMeta", }); internal_static_ResponseHeader_descriptor = getDescriptor().getMessageTypes().get(5); internal_static_ResponseHeader_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ResponseHeader_descriptor, - new java.lang.String[] { "CallId", "Exception", "CellBlockMeta", }, - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader.class, - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader.Builder.class); + new java.lang.String[] { "CallId", "Exception", "CellBlockMeta", }); return null; } }; @@ -4065,6 +5860,6 @@ public final class RPCProtos { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(), }, assigner); } - + // @@protoc_insertion_point(outer_class_scope) } diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RegionServerStatusProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RegionServerStatusProtos.java index 89eebc4..cf3fbd6 100644 --- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RegionServerStatusProtos.java +++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RegionServerStatusProtos.java @@ -10,78 +10,239 @@ public final class RegionServerStatusProtos { } public interface RegionServerStartupRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required uint32 port = 1; + /** + * required uint32 port = 1; + * + *
+     ** Port number this regionserver is up on 
+     * 
+ */ boolean hasPort(); + /** + * required uint32 port = 1; + * + *
+     ** Port number this regionserver is up on 
+     * 
+ */ int getPort(); - + // required uint64 serverStartCode = 2; + /** + * required uint64 serverStartCode = 2; + * + *
+     ** This server's startcode 
+     * 
+ */ boolean hasServerStartCode(); + /** + * required uint64 serverStartCode = 2; + * + *
+     ** This server's startcode 
+     * 
+ */ long getServerStartCode(); - + // required uint64 serverCurrentTime = 3; + /** + * required uint64 serverCurrentTime = 3; + * + *
+     ** Current time of the region server in ms 
+     * 
+ */ boolean hasServerCurrentTime(); + /** + * required uint64 serverCurrentTime = 3; + * + *
+     ** Current time of the region server in ms 
+     * 
+ */ long getServerCurrentTime(); } + /** + * Protobuf type {@code RegionServerStartupRequest} + */ public static final class RegionServerStartupRequest extends com.google.protobuf.GeneratedMessage implements RegionServerStartupRequestOrBuilder { // Use RegionServerStartupRequest.newBuilder() to construct. - private RegionServerStartupRequest(Builder builder) { + private RegionServerStartupRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private RegionServerStartupRequest(boolean noInit) {} - + private RegionServerStartupRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final RegionServerStartupRequest defaultInstance; public static RegionServerStartupRequest getDefaultInstance() { return defaultInstance; } - + public RegionServerStartupRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private RegionServerStartupRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + port_ = input.readUInt32(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + serverStartCode_ = input.readUInt64(); + break; + } + case 24: { + bitField0_ |= 0x00000004; + serverCurrentTime_ = input.readUInt64(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerStartupRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerStartupRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerStartupRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.class, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public RegionServerStartupRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return new RegionServerStartupRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required uint32 port = 1; public static final int PORT_FIELD_NUMBER = 1; private int port_; + /** + * required uint32 port = 1; + * + *
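
// --------------------------------------------------------------------------
// [Editor's aside -- illustrative sketch, not part of the generated patch]
// The static PARSER defined above is the new single entry point for decoding
// this message; getParserForType() exposes it to generic framework code. A
// round-trip sketch under assumed inputs (`startcode` and `now` are
// hypothetical locals; 60020 is only an example port):
//
//   RegionServerStartupRequest req = RegionServerStartupRequest.newBuilder()
//       .setPort(60020)
//       .setServerStartCode(startcode)
//       .setServerCurrentTime(now)
//       .build();
//   byte[] bytes = req.toByteArray();
//   RegionServerStartupRequest copy = RegionServerStartupRequest.PARSER.parseFrom(bytes);
// --------------------------------------------------------------------------
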
+     ** Port number this regionserver is up on 
+     * 
+ */ public boolean hasPort() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required uint32 port = 1; + * + *
+     ** Port number this regionserver is up on 
+     * 
+ */ public int getPort() { return port_; } - + // required uint64 serverStartCode = 2; public static final int SERVERSTARTCODE_FIELD_NUMBER = 2; private long serverStartCode_; + /** + * required uint64 serverStartCode = 2; + * + *
+     ** This server's startcode 
+     * 
+ */ public boolean hasServerStartCode() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required uint64 serverStartCode = 2; + * + *
+     ** This server's startcode 
+     * 
+ */ public long getServerStartCode() { return serverStartCode_; } - + // required uint64 serverCurrentTime = 3; public static final int SERVERCURRENTTIME_FIELD_NUMBER = 3; private long serverCurrentTime_; + /** + * required uint64 serverCurrentTime = 3; + * + *
+     ** Current time of the region server in ms 
+     * 
+ */ public boolean hasServerCurrentTime() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * required uint64 serverCurrentTime = 3; + * + *
+     ** Current time of the region server in ms 
+     * 
+ */ public long getServerCurrentTime() { return serverCurrentTime_; } - + private void initFields() { port_ = 0; serverStartCode_ = 0L; @@ -91,7 +252,7 @@ public final class RegionServerStatusProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasPort()) { memoizedIsInitialized = 0; return false; @@ -107,7 +268,7 @@ public final class RegionServerStatusProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -122,12 +283,12 @@ public final class RegionServerStatusProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -145,14 +306,14 @@ public final class RegionServerStatusProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -162,7 +323,7 @@ public final class RegionServerStatusProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest) obj; - + boolean result = true; result = result && (hasPort() == other.hasPort()); if (hasPort()) { @@ -183,9 +344,13 @@ public final class RegionServerStatusProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasPort()) { @@ -201,89 +366,79 @@ public final class RegionServerStatusProtos { hash = (53 * hash) + hashLong(getServerCurrentTime()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) 
throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code RegionServerStartupRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequestOrBuilder { @@ -291,18 +446,21 @@ public final class RegionServerStatusProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerStartupRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return 
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerStartupRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerStartupRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.class, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -313,7 +471,7 @@ public final class RegionServerStatusProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); port_ = 0; @@ -324,20 +482,20 @@ public final class RegionServerStatusProtos { bitField0_ = (bitField0_ & ~0x00000004); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerStartupRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest build() { org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest result = buildPartial(); if (!result.isInitialized()) { @@ -345,17 +503,7 @@ public final class RegionServerStatusProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest(this); int from_bitField0_ = bitField0_; @@ -376,7 +524,7 @@ public final class RegionServerStatusProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest)other); @@ -385,7 +533,7 @@ public final class RegionServerStatusProtos { return this; } } - + public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.getDefaultInstance()) return this; if (other.hasPort()) { @@ -400,7 +548,7 @@ public final class RegionServerStatusProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasPort()) { @@ -416,187 +564,419 @@ public final class RegionServerStatusProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - port_ = input.readUInt32(); - break; - } - case 16: { - bitField0_ |= 0x00000002; - serverStartCode_ = input.readUInt64(); - break; - } - case 24: { - bitField0_ |= 0x00000004; - serverCurrentTime_ = input.readUInt64(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required uint32 port = 1; private int port_ ; + /** + * required uint32 port = 1; + * + *
+       ** Port number this regionserver is up on 
+       * 
+ */ public boolean hasPort() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required uint32 port = 1; + * + *
+       ** Port number this regionserver is up on 
+       * 
+ */ public int getPort() { return port_; } + /** + * required uint32 port = 1; + * + *
+       ** Port number this regionserver is up on 
+       * 
+ */ public Builder setPort(int value) { bitField0_ |= 0x00000001; port_ = value; onChanged(); return this; } + /** + * required uint32 port = 1; + * + *
+       ** Port number this regionserver is up on 
+       * 
+ */ public Builder clearPort() { bitField0_ = (bitField0_ & ~0x00000001); port_ = 0; onChanged(); return this; } - + // required uint64 serverStartCode = 2; private long serverStartCode_ ; + /** + * required uint64 serverStartCode = 2; + * + *
+       ** This server's startcode 
+       * 
+ */ public boolean hasServerStartCode() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required uint64 serverStartCode = 2; + * + *
+       ** This server's startcode 
+       * 
+ */ public long getServerStartCode() { return serverStartCode_; } + /** + * required uint64 serverStartCode = 2; + * + *
+       ** This server's startcode 
+       * 
+ */ public Builder setServerStartCode(long value) { bitField0_ |= 0x00000002; serverStartCode_ = value; onChanged(); return this; } + /** + * required uint64 serverStartCode = 2; + * + *
+       ** This server's startcode 
+       * 
+ */ public Builder clearServerStartCode() { bitField0_ = (bitField0_ & ~0x00000002); serverStartCode_ = 0L; onChanged(); return this; } - + // required uint64 serverCurrentTime = 3; private long serverCurrentTime_ ; + /** + * required uint64 serverCurrentTime = 3; + * + *
+       ** Current time of the region server in ms 
+       * 
+ */ public boolean hasServerCurrentTime() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * required uint64 serverCurrentTime = 3; + * + *
+       ** Current time of the region server in ms 
+       * 
+ */ public long getServerCurrentTime() { return serverCurrentTime_; } + /** + * required uint64 serverCurrentTime = 3; + * + *
+       ** Current time of the region server in ms 
+       * 
+ */ public Builder setServerCurrentTime(long value) { bitField0_ |= 0x00000004; serverCurrentTime_ = value; onChanged(); return this; } + /** + * required uint64 serverCurrentTime = 3; + * + *
+       ** Current time of the region server in ms 
+       * 
+ */ public Builder clearServerCurrentTime() { bitField0_ = (bitField0_ & ~0x00000004); serverCurrentTime_ = 0L; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:RegionServerStartupRequest) } - + static { defaultInstance = new RegionServerStartupRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:RegionServerStartupRequest) } - + public interface RegionServerStartupResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // repeated .NameStringPair mapEntries = 1; + /** + * repeated .NameStringPair mapEntries = 1; + * + *
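
// --------------------------------------------------------------------------
// [Editor's aside -- illustrative sketch, not part of the generated patch]
// Deleting buildParsed() does not drop the required-field check: in protobuf
// 2.5, AbstractParser.parseFrom() itself throws when a parsed message is
// uninitialized. Since port, serverStartCode and serverCurrentTime are all
// `required` in RegionServerStartupRequest, a sketch of the failure mode:
//
//   try {
//     RegionServerStartupRequest.PARSER.parseFrom(new byte[0]);
//   } catch (com.google.protobuf.InvalidProtocolBufferException e) {
//     // thrown: the message parsed but its required fields are unset
//   }
// --------------------------------------------------------------------------
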
+     **
+     * Configuration for the regionserver to use: e.g. filesystem,
+     * hbase rootdir, the hostname to use when creating the RegionServer ServerName,
+     * etc
+     * 
+ */ java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> getMapEntriesList(); + /** + * repeated .NameStringPair mapEntries = 1; + * + *
+     **
+     * Configuration for the regionserver to use: e.g. filesystem,
+     * hbase rootdir, the hostname to use when creating the RegionServer ServerName,
+     * etc
+     * 
+ */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getMapEntries(int index); + /** + * repeated .NameStringPair mapEntries = 1; + * + *
+     **
+     * Configuration for the regionserver to use: e.g. filesystem,
+     * hbase rootdir, the hostname to use when creating the RegionServer ServerName,
+     * etc
+     * 
+ */ int getMapEntriesCount(); + /** + * repeated .NameStringPair mapEntries = 1; + * + *
+     **
+     * Configuration for the regionserver to use: e.g. filesystem,
+     * hbase rootdir, the hostname to use when creating the RegionServer ServerName,
+     * etc
+     * 
+ */ java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getMapEntriesOrBuilderList(); + /** + * repeated .NameStringPair mapEntries = 1; + * + *
+     **
+     * Configuration for the regionserver to use: e.g. filesystem,
+     * hbase rootdir, the hostname to use when creating the RegionServer ServerName,
+     * etc
+     * 
+ */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getMapEntriesOrBuilder( int index); } + /** + * Protobuf type {@code RegionServerStartupResponse} + */ public static final class RegionServerStartupResponse extends com.google.protobuf.GeneratedMessage implements RegionServerStartupResponseOrBuilder { // Use RegionServerStartupResponse.newBuilder() to construct. - private RegionServerStartupResponse(Builder builder) { + private RegionServerStartupResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private RegionServerStartupResponse(boolean noInit) {} - + private RegionServerStartupResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final RegionServerStartupResponse defaultInstance; public static RegionServerStartupResponse getDefaultInstance() { return defaultInstance; } - + public RegionServerStartupResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private RegionServerStartupResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + mapEntries_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + mapEntries_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.PARSER, extensionRegistry)); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + mapEntries_ = java.util.Collections.unmodifiableList(mapEntries_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerStartupResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerStartupResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerStartupResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.class, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.Builder.class); + } + + public static 
com.google.protobuf.Parser<RegionServerStartupResponse> PARSER = + new com.google.protobuf.AbstractParser<RegionServerStartupResponse>() { + public RegionServerStartupResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RegionServerStartupResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser<RegionServerStartupResponse> getParserForType() { + return PARSER; } - + // repeated .NameStringPair mapEntries = 1; public static final int MAPENTRIES_FIELD_NUMBER = 1; private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> mapEntries_; + /** + * repeated .NameStringPair mapEntries = 1; + * + *
+     **
+     * Configuration for the regionserver to use: e.g. filesystem,
+     * hbase rootdir, the hostname to use when creating the RegionServer ServerName,
+     * etc
+     * 
+ */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> getMapEntriesList() { return mapEntries_; } + /** + * repeated .NameStringPair mapEntries = 1; + * + *
+     **
+     * Configuration for the regionserver to use: e.g. filesystem,
+     * hbase rootdir, the hostname to use when creating the RegionServer ServerName,
+     * etc
+     * 
+ */ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getMapEntriesOrBuilderList() { return mapEntries_; } + /** + * repeated .NameStringPair mapEntries = 1; + * + *
+     **
+     * Configuration for the regionserver to use: e.g. filesystem,
+     * hbase rootdir, the hostname to use when creating the RegionServer ServerName,
+     * etc
+     * 
+ */ public int getMapEntriesCount() { return mapEntries_.size(); } + /** + * repeated .NameStringPair mapEntries = 1; + * + *
+     **
+     * Configuration for the regionserver to use: e.g. filesystem,
+     * hbase rootdir, the hostname to use when creating the RegionServer ServerName,
+     * etc
+     * 
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getMapEntries(int index) { return mapEntries_.get(index); } + /** + * repeated .NameStringPair mapEntries = 1; + * + *
+     **
+     * Configuration for the regionserver to use: e.g. filesystem,
+     * hbase rootdir, the hostname to use when creating the RegionServer ServerName,
+     * etc
+     * 
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getMapEntriesOrBuilder( int index) { return mapEntries_.get(index); } - + private void initFields() { mapEntries_ = java.util.Collections.emptyList(); } @@ -604,7 +984,7 @@ public final class RegionServerStatusProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + for (int i = 0; i < getMapEntriesCount(); i++) { if (!getMapEntries(i).isInitialized()) { memoizedIsInitialized = 0; @@ -614,7 +994,7 @@ public final class RegionServerStatusProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -623,12 +1003,12 @@ public final class RegionServerStatusProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; for (int i = 0; i < mapEntries_.size(); i++) { size += com.google.protobuf.CodedOutputStream @@ -638,14 +1018,14 @@ public final class RegionServerStatusProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -655,7 +1035,7 @@ public final class RegionServerStatusProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse) obj; - + boolean result = true; result = result && getMapEntriesList() .equals(other.getMapEntriesList()); @@ -663,9 +1043,13 @@ public final class RegionServerStatusProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (getMapEntriesCount() > 0) { @@ -673,89 +1057,79 @@ public final class RegionServerStatusProtos { hash = (53 * hash) + getMapEntriesList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } 
public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code RegionServerStartupResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponseOrBuilder { @@ -763,18 +1137,21 @@ public final class RegionServerStatusProtos { getDescriptor() { return 
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerStartupResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerStartupResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerStartupResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.class, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -786,7 +1163,7 @@ public final class RegionServerStatusProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (mapEntriesBuilder_ == null) { @@ -797,20 +1174,20 @@ public final class RegionServerStatusProtos { } return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerStartupResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse build() { org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse result = buildPartial(); if (!result.isInitialized()) { @@ -818,17 +1195,7 @@ public final class RegionServerStatusProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse(this); int from_bitField0_ = bitField0_; @@ -844,7 +1211,7 @@ public final class RegionServerStatusProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof 
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse)other); @@ -853,7 +1220,7 @@ public final class RegionServerStatusProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.getDefaultInstance()) return this; if (mapEntriesBuilder_ == null) { @@ -885,7 +1252,7 @@ public final class RegionServerStatusProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { for (int i = 0; i < getMapEntriesCount(); i++) { if (!getMapEntries(i).isInitialized()) { @@ -895,42 +1262,26 @@ public final class RegionServerStatusProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addMapEntries(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // repeated .NameStringPair mapEntries = 1; private java.util.List mapEntries_ = java.util.Collections.emptyList(); @@ -940,10 +1291,20 @@ public final class RegionServerStatusProtos { bitField0_ |= 0x00000001; } } - + private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> mapEntriesBuilder_; - + + /** + * repeated .NameStringPair mapEntries = 1; + * + *
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * </pre>
+ */ public java.util.List getMapEntriesList() { if (mapEntriesBuilder_ == null) { return java.util.Collections.unmodifiableList(mapEntries_); @@ -951,6 +1312,16 @@ public final class RegionServerStatusProtos { return mapEntriesBuilder_.getMessageList(); } } + /** + * repeated .NameStringPair mapEntries = 1; + * + *
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * </pre>
+ */ public int getMapEntriesCount() { if (mapEntriesBuilder_ == null) { return mapEntries_.size(); @@ -958,6 +1329,16 @@ public final class RegionServerStatusProtos { return mapEntriesBuilder_.getCount(); } } + /** + * repeated .NameStringPair mapEntries = 1; + * + *
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * </pre>
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getMapEntries(int index) { if (mapEntriesBuilder_ == null) { return mapEntries_.get(index); @@ -965,6 +1346,16 @@ public final class RegionServerStatusProtos { return mapEntriesBuilder_.getMessage(index); } } + /** + * repeated .NameStringPair mapEntries = 1; + * + *
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * </pre>
+ */ public Builder setMapEntries( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) { if (mapEntriesBuilder_ == null) { @@ -979,6 +1370,16 @@ public final class RegionServerStatusProtos { } return this; } + /** + * repeated .NameStringPair mapEntries = 1; + * + *
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * </pre>
+ */ public Builder setMapEntries( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { if (mapEntriesBuilder_ == null) { @@ -990,6 +1391,16 @@ public final class RegionServerStatusProtos { } return this; } + /** + * repeated .NameStringPair mapEntries = 1; + * + *
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * </pre>
+ */ public Builder addMapEntries(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) { if (mapEntriesBuilder_ == null) { if (value == null) { @@ -1003,6 +1414,16 @@ public final class RegionServerStatusProtos { } return this; } + /** + * repeated .NameStringPair mapEntries = 1; + * + *
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * </pre>
+ */ public Builder addMapEntries( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) { if (mapEntriesBuilder_ == null) { @@ -1017,6 +1438,16 @@ public final class RegionServerStatusProtos { } return this; } + /** + * repeated .NameStringPair mapEntries = 1; + * + *
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * </pre>
+ */ public Builder addMapEntries( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { if (mapEntriesBuilder_ == null) { @@ -1028,6 +1459,16 @@ public final class RegionServerStatusProtos { } return this; } + /** + * repeated .NameStringPair mapEntries = 1; + * + *
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * </pre>
+ */ public Builder addMapEntries( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { if (mapEntriesBuilder_ == null) { @@ -1039,6 +1480,16 @@ public final class RegionServerStatusProtos { } return this; } + /** + * repeated .NameStringPair mapEntries = 1; + * + *
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * </pre>
+ */ public Builder addAllMapEntries( java.lang.Iterable values) { if (mapEntriesBuilder_ == null) { @@ -1050,6 +1501,16 @@ public final class RegionServerStatusProtos { } return this; } + /** + * repeated .NameStringPair mapEntries = 1; + * + *
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * </pre>
+ */ public Builder clearMapEntries() { if (mapEntriesBuilder_ == null) { mapEntries_ = java.util.Collections.emptyList(); @@ -1060,6 +1521,16 @@ public final class RegionServerStatusProtos { } return this; } + /** + * repeated .NameStringPair mapEntries = 1; + * + *
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * </pre>
+ */ public Builder removeMapEntries(int index) { if (mapEntriesBuilder_ == null) { ensureMapEntriesIsMutable(); @@ -1070,10 +1541,30 @@ public final class RegionServerStatusProtos { } return this; } + /** + * repeated .NameStringPair mapEntries = 1; + * + *
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * </pre>
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder getMapEntriesBuilder( int index) { return getMapEntriesFieldBuilder().getBuilder(index); } + /** + * repeated .NameStringPair mapEntries = 1; + * + *
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * </pre>
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getMapEntriesOrBuilder( int index) { if (mapEntriesBuilder_ == null) { @@ -1081,6 +1572,16 @@ public final class RegionServerStatusProtos { return mapEntriesBuilder_.getMessageOrBuilder(index); } } + /** + * repeated .NameStringPair mapEntries = 1; + * + *
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * </pre>
+ */ public java.util.List getMapEntriesOrBuilderList() { if (mapEntriesBuilder_ != null) { @@ -1089,15 +1590,45 @@ public final class RegionServerStatusProtos { return java.util.Collections.unmodifiableList(mapEntries_); } } + /** + * repeated .NameStringPair mapEntries = 1; + * + *
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * </pre>
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder addMapEntriesBuilder() { return getMapEntriesFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance()); } + /** + * repeated .NameStringPair mapEntries = 1; + * + *
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * </pre>
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder addMapEntriesBuilder( int index) { return getMapEntriesFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance()); } + /** + * repeated .NameStringPair mapEntries = 1; + * + *
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * </pre>
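
The eighteen hunks above are javadoc-only: protoc 2.5 re-emits a comment block ahead of every generated accessor for the repeated mapEntries field, while the accessor surface itself is unchanged from 2.4. A minimal caller-side sketch of that surface (the helper class name and the key/value strings are mine, purely for illustration):

    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair;
    import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse;

    final class MapEntriesSketch {
      // addMapEntries also accepts a NameStringPair.Builder directly,
      // matching the generated overloads shown in this patch.
      static RegionServerStartupResponse oneEntry() {
        return RegionServerStartupResponse.newBuilder()
            .addMapEntries(NameStringPair.newBuilder()
                .setName("hbase.rootdir")                 // illustrative key
                .setValue("hdfs://namenode:8020/hbase"))  // illustrative value
            .build();
      }
    }
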
+ */ public java.util.List getMapEntriesBuilderList() { return getMapEntriesFieldBuilder().getBuilderList(); @@ -1116,86 +1647,234 @@ public final class RegionServerStatusProtos { } return mapEntriesBuilder_; } - + // @@protoc_insertion_point(builder_scope:RegionServerStartupResponse) } - + static { defaultInstance = new RegionServerStartupResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:RegionServerStartupResponse) } - + public interface RegionServerReportRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .ServerName server = 1; + /** + * required .ServerName server = 1; + */ boolean hasServer(); + /** + * required .ServerName server = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer(); + /** + * required .ServerName server = 1; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder(); - + // optional .ServerLoad load = 2; + /** + * optional .ServerLoad load = 2; + * + *
+     ** load the server is under 
+     * </pre>
+     */
     boolean hasLoad();
+    /**
+     * <code>optional .ServerLoad load = 2;</code>
+     *
+     * <pre>
+     ** load the server is under 
+     * </pre>
+     */
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad getLoad();
+    /**
+     * <code>optional .ServerLoad load = 2;</code>
+     *
+     * <pre>
+     ** load the server is under 
+     * </pre>
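
Just below, RegionServerReportRequest picks up protobuf 2.5's biggest structural change: parsing moves out of Builder.mergeFrom and into a private message constructor driven by the static PARSER, with the trailing UnknownFieldSet stored on the message itself. A minimal sketch of the observable effect, assuming the wire bytes come from a peer whose schema may carry extra fields (the helper class name is mine):

    import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest;

    final class UnknownFieldsSketch {
      // Fields the local schema does not know end up in getUnknownFields()
      // and are re-emitted by toByteArray(), so a proxy does not drop them.
      static byte[] roundTrip(byte[] wire)
          throws com.google.protobuf.InvalidProtocolBufferException {
        RegionServerReportRequest req = RegionServerReportRequest.PARSER.parseFrom(wire);
        return req.toByteArray();
      }
    }
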
+ */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoadOrBuilder getLoadOrBuilder(); } + /** + * Protobuf type {@code RegionServerReportRequest} + */ public static final class RegionServerReportRequest extends com.google.protobuf.GeneratedMessage implements RegionServerReportRequestOrBuilder { // Use RegionServerReportRequest.newBuilder() to construct. - private RegionServerReportRequest(Builder builder) { + private RegionServerReportRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private RegionServerReportRequest(boolean noInit) {} - + private RegionServerReportRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final RegionServerReportRequest defaultInstance; public static RegionServerReportRequest getDefaultInstance() { return defaultInstance; } - + public RegionServerReportRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private RegionServerReportRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = server_.toBuilder(); + } + server_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(server_); + server_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.Builder subBuilder = null; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + subBuilder = load_.toBuilder(); + } + load_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(load_); + load_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000002; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerReportRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return 
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerReportRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerReportRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest.class, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest.Builder.class); } - - private int bitField0_; + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public RegionServerReportRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RegionServerReportRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; // required .ServerName server = 1; public static final int SERVER_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName server_; + /** + * required .ServerName server = 1; + */ public boolean hasServer() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .ServerName server = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer() { return server_; } + /** + * required .ServerName server = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() { return server_; } - + // optional .ServerLoad load = 2; public static final int LOAD_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad load_; + /** + * optional .ServerLoad load = 2; + * + *
+     ** load the server is under 
+     * </pre>
+     */
     public boolean hasLoad() {
       return ((bitField0_ & 0x00000002) == 0x00000002);
     }
+    /**
+     * <code>optional .ServerLoad load = 2;</code>
+     *
+     * <pre>
+     ** load the server is under 
+     * </pre>
+     */
     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad getLoad() {
       return load_;
     }
+    /**
+     * <code>optional .ServerLoad load = 2;</code>
+     *
+     * <pre>
+     ** load the server is under 
+     * </pre>
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoadOrBuilder getLoadOrBuilder() { return load_; } - + private void initFields() { server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); load_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.getDefaultInstance(); @@ -1204,7 +1883,7 @@ public final class RegionServerStatusProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasServer()) { memoizedIsInitialized = 0; return false; @@ -1222,7 +1901,7 @@ public final class RegionServerStatusProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -1234,12 +1913,12 @@ public final class RegionServerStatusProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -1253,14 +1932,14 @@ public final class RegionServerStatusProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -1270,7 +1949,7 @@ public final class RegionServerStatusProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest) obj; - + boolean result = true; result = result && (hasServer() == other.hasServer()); if (hasServer()) { @@ -1286,9 +1965,13 @@ public final class RegionServerStatusProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasServer()) { @@ -1300,89 +1983,79 @@ public final class RegionServerStatusProtos { hash = (53 * hash) + getLoad().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + 
return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code RegionServerReportRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequestOrBuilder { @@ -1390,18 +2063,21 @@ public final class RegionServerStatusProtos { getDescriptor() { return 
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerReportRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerReportRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerReportRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest.class, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -1414,7 +2090,7 @@ public final class RegionServerStatusProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (serverBuilder_ == null) { @@ -1431,20 +2107,20 @@ public final class RegionServerStatusProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerReportRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest build() { org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest result = buildPartial(); if (!result.isInitialized()) { @@ -1452,17 +2128,7 @@ public final class RegionServerStatusProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest(this); int from_bitField0_ = bitField0_; @@ -1487,7 +2153,7 @@ public final class RegionServerStatusProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof 
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest)other); @@ -1496,7 +2162,7 @@ public final class RegionServerStatusProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest.getDefaultInstance()) return this; if (other.hasServer()) { @@ -1508,7 +2174,7 @@ public final class RegionServerStatusProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasServer()) { @@ -1526,61 +2192,39 @@ public final class RegionServerStatusProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(); - if (hasServer()) { - subBuilder.mergeFrom(getServer()); - } - input.readMessage(subBuilder, extensionRegistry); - setServer(subBuilder.buildPartial()); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.newBuilder(); - if (hasLoad()) { - subBuilder.mergeFrom(getLoad()); - } - input.readMessage(subBuilder, extensionRegistry); - setLoad(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .ServerName server = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverBuilder_; + /** + * required .ServerName server = 1; + */ public boolean hasServer() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .ServerName server = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer() { if (serverBuilder_ == null) { return server_; 
@@ -1588,6 +2232,9 @@ public final class RegionServerStatusProtos { return serverBuilder_.getMessage(); } } + /** + * required .ServerName server = 1; + */ public Builder setServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { if (serverBuilder_ == null) { if (value == null) { @@ -1601,6 +2248,9 @@ public final class RegionServerStatusProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .ServerName server = 1; + */ public Builder setServer( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { if (serverBuilder_ == null) { @@ -1612,6 +2262,9 @@ public final class RegionServerStatusProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .ServerName server = 1; + */ public Builder mergeServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { if (serverBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -1628,6 +2281,9 @@ public final class RegionServerStatusProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .ServerName server = 1; + */ public Builder clearServer() { if (serverBuilder_ == null) { server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); @@ -1638,11 +2294,17 @@ public final class RegionServerStatusProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * required .ServerName server = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getServerBuilder() { bitField0_ |= 0x00000001; onChanged(); return getServerFieldBuilder().getBuilder(); } + /** + * required .ServerName server = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() { if (serverBuilder_ != null) { return serverBuilder_.getMessageOrBuilder(); @@ -1650,6 +2312,9 @@ public final class RegionServerStatusProtos { return server_; } } + /** + * required .ServerName server = 1; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getServerFieldBuilder() { @@ -1663,14 +2328,28 @@ public final class RegionServerStatusProtos { } return serverBuilder_; } - + // optional .ServerLoad load = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad load_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoadOrBuilder> loadBuilder_; + /** + * optional .ServerLoad load = 2; + * + *
+       ** load the server is under 
+       * </pre>
+       */
       public boolean hasLoad() {
         return ((bitField0_ & 0x00000002) == 0x00000002);
       }
+      /**
+       * <code>optional .ServerLoad load = 2;</code>
+       *
+       * <pre>
+       ** load the server is under 
+       * </pre>
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad getLoad() { if (loadBuilder_ == null) { return load_; @@ -1678,6 +2357,13 @@ public final class RegionServerStatusProtos { return loadBuilder_.getMessage(); } } + /** + * optional .ServerLoad load = 2; + * + *
+       ** load the server is under 
+       * </pre>
+ */ public Builder setLoad(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad value) { if (loadBuilder_ == null) { if (value == null) { @@ -1691,6 +2377,13 @@ public final class RegionServerStatusProtos { bitField0_ |= 0x00000002; return this; } + /** + * optional .ServerLoad load = 2; + * + *
+       ** load the server is under 
+       * </pre>
+ */ public Builder setLoad( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.Builder builderForValue) { if (loadBuilder_ == null) { @@ -1702,6 +2395,13 @@ public final class RegionServerStatusProtos { bitField0_ |= 0x00000002; return this; } + /** + * optional .ServerLoad load = 2; + * + *
+       ** load the server is under 
+       * </pre>
+ */ public Builder mergeLoad(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad value) { if (loadBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && @@ -1718,6 +2418,13 @@ public final class RegionServerStatusProtos { bitField0_ |= 0x00000002; return this; } + /** + * optional .ServerLoad load = 2; + * + *
+       ** load the server is under 
+       * </pre>
+ */ public Builder clearLoad() { if (loadBuilder_ == null) { load_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.getDefaultInstance(); @@ -1728,11 +2435,25 @@ public final class RegionServerStatusProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } + /** + * optional .ServerLoad load = 2; + * + *
+       ** load the server is under 
+       * </pre>
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.Builder getLoadBuilder() { bitField0_ |= 0x00000002; onChanged(); return getLoadFieldBuilder().getBuilder(); } + /** + * optional .ServerLoad load = 2; + * + *
+       ** load the server is under 
+       * </pre>
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoadOrBuilder getLoadOrBuilder() { if (loadBuilder_ != null) { return loadBuilder_.getMessageOrBuilder(); @@ -1740,6 +2461,13 @@ public final class RegionServerStatusProtos { return load_; } } + /** + * optional .ServerLoad load = 2; + * + *
+       ** load the server is under 
+       * </pre>
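
The builder accessors documented above keep their 2.4 semantics: setLoad replaces the field, mergeLoad merges into any existing value, clearLoad resets it. A minimal sketch of building a report under those rules; the helper name, hostname, port, and start code are made up, and the optional load field is simply left unset:

    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName;
    import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest;

    final class ReportRequestSketch {
      static RegionServerReportRequest minimalReport() {
        // Only the required server field is set; build() would throw an
        // UninitializedMessageException if it were missing.
        return RegionServerReportRequest.newBuilder()
            .setServer(ServerName.newBuilder()
                .setHostName("rs1.example.org")
                .setPort(60020)
                .setStartCode(1L))
            .build();
      }
    }
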
+ */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoadOrBuilder> getLoadFieldBuilder() { @@ -1753,84 +2481,145 @@ public final class RegionServerStatusProtos { } return loadBuilder_; } - + // @@protoc_insertion_point(builder_scope:RegionServerReportRequest) } - + static { defaultInstance = new RegionServerReportRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:RegionServerReportRequest) } - + public interface RegionServerReportResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code RegionServerReportResponse} + */ public static final class RegionServerReportResponse extends com.google.protobuf.GeneratedMessage implements RegionServerReportResponseOrBuilder { // Use RegionServerReportResponse.newBuilder() to construct. - private RegionServerReportResponse(Builder builder) { + private RegionServerReportResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private RegionServerReportResponse(boolean noInit) {} - + private RegionServerReportResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final RegionServerReportResponse defaultInstance; public static RegionServerReportResponse getDefaultInstance() { return defaultInstance; } - + public RegionServerReportResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private RegionServerReportResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerReportResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerReportResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerReportResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + 
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.class, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public RegionServerReportResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RegionServerReportResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -1840,101 +2629,95 @@ public final class RegionServerStatusProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse) obj; - + boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parseFrom( byte[] data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code RegionServerReportResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponseOrBuilder { @@ -1942,18 +2725,21 @@ public final class RegionServerStatusProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerReportResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return 
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerReportResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerReportResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.class, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -1964,25 +2750,25 @@ public final class RegionServerStatusProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerReportResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse build() { org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse result = buildPartial(); if (!result.isInitialized()) { @@ -1990,23 +2776,13 @@ public final class RegionServerStatusProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse(this); onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse)other); @@ -2015,142 +2791,300 @@ public final class RegionServerStatusProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse other) { if (other == 
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - - + // @@protoc_insertion_point(builder_scope:RegionServerReportResponse) } - + static { defaultInstance = new RegionServerReportResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:RegionServerReportResponse) } - + public interface ReportRSFatalErrorRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .ServerName server = 1; + /** + * required .ServerName server = 1; + * + *
+     ** name of the server experiencing the error 
+     * </pre>
+     */
     boolean hasServer();
+    /**
+     * <code>required .ServerName server = 1;</code>
+     *
+     * <pre>
+     ** name of the server experiencing the error 
+     * </pre>
+     */
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer();
+    /**
+     * <code>required .ServerName server = 1;</code>
+     *
+     * <pre>
+     ** name of the server experiencing the error 
+     * </pre>
+     */
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder();
-    
+
     // required string errorMessage = 2;
+    /**
+     * <code>required string errorMessage = 2;</code>
+     *
+     * <pre>
+     ** informative text to expose in the master logs and UI 
+     * </pre>
+     */
     boolean hasErrorMessage();
-    String getErrorMessage();
+    /**
+     * <code>required string errorMessage = 2;</code>
+     *
+     * <pre>
+     ** informative text to expose in the master logs and UI 
+     * </pre>
+     */
+    java.lang.String getErrorMessage();
+    /**
+     * <code>required string errorMessage = 2;</code>
+     *
+     * <pre>
+     ** informative text to expose in the master logs and UI 
+     * </pre>
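
Protobuf 2.5 promotes the raw accessor to the public surface: getErrorMessage() lazily decodes the UTF-8 bytes and caches the String (note isValidUtf8 moving from the Internal helper onto ByteString in the hunks below), while the formerly private getErrorMessageBytes() hands back the undecoded ByteString. A minimal sketch of when each is the right call (the helper name is mine; the request instance is assumed):

    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest;

    final class StringAccessorSketch {
      static void use(ReportRSFatalErrorRequest req) {
        String forLogs = req.getErrorMessage();           // decoded once, then cached
        ByteString forRelay = req.getErrorMessageBytes(); // no decode, cheap to re-serialize
        System.out.println(forLogs.length() + " chars, " + forRelay.size() + " bytes");
      }
    }
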
+ */ + com.google.protobuf.ByteString + getErrorMessageBytes(); } + /** + * Protobuf type {@code ReportRSFatalErrorRequest} + */ public static final class ReportRSFatalErrorRequest extends com.google.protobuf.GeneratedMessage implements ReportRSFatalErrorRequestOrBuilder { // Use ReportRSFatalErrorRequest.newBuilder() to construct. - private ReportRSFatalErrorRequest(Builder builder) { + private ReportRSFatalErrorRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private ReportRSFatalErrorRequest(boolean noInit) {} - + private ReportRSFatalErrorRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final ReportRSFatalErrorRequest defaultInstance; public static ReportRSFatalErrorRequest getDefaultInstance() { return defaultInstance; } - + public ReportRSFatalErrorRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ReportRSFatalErrorRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = server_.toBuilder(); + } + server_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(server_); + server_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + case 18: { + bitField0_ |= 0x00000002; + errorMessage_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_ReportRSFatalErrorRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_ReportRSFatalErrorRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_ReportRSFatalErrorRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest.class, 
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest.Builder.class); } - + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public ReportRSFatalErrorRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ReportRSFatalErrorRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + private int bitField0_; // required .ServerName server = 1; public static final int SERVER_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName server_; + /** + * required .ServerName server = 1; + * + *
+     * <pre>
+     ** name of the server experiencing the error
+     * </pre>
+ */ public boolean hasServer() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .ServerName server = 1; + * + *
+     * <pre>
+     ** name of the server experiencing the error
+     * </pre>
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer() { return server_; } + /** + * required .ServerName server = 1; + * + *
+     * <pre>
+     ** name of the server experiencing the error
+     * </pre>
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() { return server_; } - + // required string errorMessage = 2; public static final int ERRORMESSAGE_FIELD_NUMBER = 2; private java.lang.Object errorMessage_; + /** + * required string errorMessage = 2; + * + *
+     * <pre>
+     ** informative text to expose in the master logs and UI
+     * </pre>
+ */ public boolean hasErrorMessage() { return ((bitField0_ & 0x00000002) == 0x00000002); } - public String getErrorMessage() { + /** + * required string errorMessage = 2; + * + *
+     * <pre>
+     ** informative text to expose in the master logs and UI
+     * </pre>
+ */ + public java.lang.String getErrorMessage() { java.lang.Object ref = errorMessage_; - if (ref instanceof String) { - return (String) ref; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { errorMessage_ = s; } return s; } } - private com.google.protobuf.ByteString getErrorMessageBytes() { + /** + * required string errorMessage = 2; + * + *
+     * <pre>
+     ** informative text to expose in the master logs and UI
+     * </pre>
+ */ + public com.google.protobuf.ByteString + getErrorMessageBytes() { java.lang.Object ref = errorMessage_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); errorMessage_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + private void initFields() { server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); errorMessage_ = ""; @@ -2159,7 +3093,7 @@ public final class RegionServerStatusProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasServer()) { memoizedIsInitialized = 0; return false; @@ -2175,7 +3109,7 @@ public final class RegionServerStatusProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -2187,12 +3121,12 @@ public final class RegionServerStatusProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -2206,14 +3140,14 @@ public final class RegionServerStatusProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -2223,7 +3157,7 @@ public final class RegionServerStatusProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest) obj; - + boolean result = true; result = result && (hasServer() == other.hasServer()); if (hasServer()) { @@ -2239,9 +3173,13 @@ public final class RegionServerStatusProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasServer()) { @@ -2253,89 +3191,79 @@ public final class RegionServerStatusProtos { hash = (53 * hash) + getErrorMessage().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } 
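Every message below repeats the same migration: the builder-based newBuilder().mergeFrom(...).buildParsed() chain is dropped in favor of the static PARSER field, which parses straight into the immutable message without an intermediate Builder. A minimal caller-side sketch of the resulting API, assuming the generated ReportRSFatalErrorRequest in this file (the decode() helper and payload argument are illustrative, not part of the patch):

    import com.google.protobuf.InvalidProtocolBufferException;
    import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest;

    public class ParserMigrationSketch {
      // The static parseFrom() overloads above delegate to this same PARSER,
      // so either entry point behaves identically: malformed bytes or a
      // missing "required" field (server, errorMessage) surface as
      // InvalidProtocolBufferException.
      static ReportRSFatalErrorRequest decode(byte[] payload)
          throws InvalidProtocolBufferException {
        return ReportRSFatalErrorRequest.PARSER.parseFrom(payload);
      }
    }

Note that PARSER.parseDelimitedFrom(InputStream), used by the rewritten parseDelimitedFrom() methods, still returns null at end-of-stream, matching the old mergeDelimitedFrom() contract.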
public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code ReportRSFatalErrorRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements 
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequestOrBuilder { @@ -2343,18 +3271,21 @@ public final class RegionServerStatusProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_ReportRSFatalErrorRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_ReportRSFatalErrorRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_ReportRSFatalErrorRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest.class, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -2366,7 +3297,7 @@ public final class RegionServerStatusProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (serverBuilder_ == null) { @@ -2379,20 +3310,20 @@ public final class RegionServerStatusProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_ReportRSFatalErrorRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest build() { org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest result = buildPartial(); if (!result.isInitialized()) { @@ -2400,17 +3331,7 @@ public final class RegionServerStatusProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest(this); int from_bitField0_ = bitField0_; @@ -2431,7 +3352,7 @@ public final class RegionServerStatusProtos { 
onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest)other); @@ -2440,19 +3361,21 @@ public final class RegionServerStatusProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest.getDefaultInstance()) return this; if (other.hasServer()) { mergeServer(other.getServer()); } if (other.hasErrorMessage()) { - setErrorMessage(other.getErrorMessage()); + bitField0_ |= 0x00000002; + errorMessage_ = other.errorMessage_; + onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasServer()) { @@ -2468,57 +3391,47 @@ public final class RegionServerStatusProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(); - if (hasServer()) { - subBuilder.mergeFrom(getServer()); - } - input.readMessage(subBuilder, extensionRegistry); - setServer(subBuilder.buildPartial()); - break; - } - case 18: { - bitField0_ |= 0x00000002; - errorMessage_ = input.readBytes(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .ServerName server = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverBuilder_; + /** + * required .ServerName server = 1; + * + *
+       * <pre>
+       ** name of the server experiencing the error
+       * </pre>
+ */ public boolean hasServer() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .ServerName server = 1; + * + *
+       * <pre>
+       ** name of the server experiencing the error
+       * </pre>
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer() { if (serverBuilder_ == null) { return server_; @@ -2526,6 +3439,13 @@ public final class RegionServerStatusProtos { return serverBuilder_.getMessage(); } } + /** + * required .ServerName server = 1; + * + *
+       * <pre>
+       ** name of the server experiencing the error
+       * </pre>
+ */ public Builder setServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { if (serverBuilder_ == null) { if (value == null) { @@ -2539,6 +3459,13 @@ public final class RegionServerStatusProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .ServerName server = 1; + * + *
+       * <pre>
+       ** name of the server experiencing the error
+       * </pre>
+ */ public Builder setServer( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { if (serverBuilder_ == null) { @@ -2550,6 +3477,13 @@ public final class RegionServerStatusProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .ServerName server = 1; + * + *
+       * <pre>
+       ** name of the server experiencing the error
+       * </pre>
+ */ public Builder mergeServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { if (serverBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -2566,6 +3500,13 @@ public final class RegionServerStatusProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .ServerName server = 1; + * + *
+       * <pre>
+       ** name of the server experiencing the error
+       * </pre>
+ */ public Builder clearServer() { if (serverBuilder_ == null) { server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); @@ -2576,11 +3517,25 @@ public final class RegionServerStatusProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * required .ServerName server = 1; + * + *
+       * <pre>
+       ** name of the server experiencing the error
+       * </pre>
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getServerBuilder() { bitField0_ |= 0x00000001; onChanged(); return getServerFieldBuilder().getBuilder(); } + /** + * required .ServerName server = 1; + * + *
+       * <pre>
+       ** name of the server experiencing the error
+       * </pre>
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() { if (serverBuilder_ != null) { return serverBuilder_.getMessageOrBuilder(); @@ -2588,6 +3543,13 @@ public final class RegionServerStatusProtos { return server_; } } + /** + * required .ServerName server = 1; + * + *
+       * <pre>
+       ** name of the server experiencing the error
+       * </pre>
+ */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getServerFieldBuilder() { @@ -2601,23 +3563,66 @@ public final class RegionServerStatusProtos { } return serverBuilder_; } - + // required string errorMessage = 2; private java.lang.Object errorMessage_ = ""; + /** + * required string errorMessage = 2; + * + *
+       * <pre>
+       ** informative text to expose in the master logs and UI
+       * </pre>
+ */ public boolean hasErrorMessage() { return ((bitField0_ & 0x00000002) == 0x00000002); } - public String getErrorMessage() { + /** + * required string errorMessage = 2; + * + *
+       * <pre>
+       ** informative text to expose in the master logs and UI
+       * </pre>
+ */ + public java.lang.String getErrorMessage() { java.lang.Object ref = errorMessage_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); errorMessage_ = s; return s; } else { - return (String) ref; - } - } - public Builder setErrorMessage(String value) { + return (java.lang.String) ref; + } + } + /** + * required string errorMessage = 2; + * + *
+       * <pre>
+       ** informative text to expose in the master logs and UI
+       * </pre>
+ */ + public com.google.protobuf.ByteString + getErrorMessageBytes() { + java.lang.Object ref = errorMessage_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + errorMessage_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * required string errorMessage = 2; + * + *
+       * <pre>
+       ** informative text to expose in the master logs and UI
+       * </pre>
+ */ + public Builder setErrorMessage( + java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ -2626,95 +3631,175 @@ public final class RegionServerStatusProtos { onChanged(); return this; } + /** + * required string errorMessage = 2; + * + *
+       * <pre>
+       ** informative text to expose in the master logs and UI
+       * </pre>
+ */ public Builder clearErrorMessage() { bitField0_ = (bitField0_ & ~0x00000002); errorMessage_ = getDefaultInstance().getErrorMessage(); onChanged(); return this; } - void setErrorMessage(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000002; + /** + * required string errorMessage = 2; + * + *
+       * <pre>
+       ** informative text to expose in the master logs and UI
+       * </pre>
+ */ + public Builder setErrorMessageBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; errorMessage_ = value; onChanged(); + return this; } - + // @@protoc_insertion_point(builder_scope:ReportRSFatalErrorRequest) } - + static { defaultInstance = new ReportRSFatalErrorRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:ReportRSFatalErrorRequest) } - + public interface ReportRSFatalErrorResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code ReportRSFatalErrorResponse} + */ public static final class ReportRSFatalErrorResponse extends com.google.protobuf.GeneratedMessage implements ReportRSFatalErrorResponseOrBuilder { // Use ReportRSFatalErrorResponse.newBuilder() to construct. - private ReportRSFatalErrorResponse(Builder builder) { + private ReportRSFatalErrorResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private ReportRSFatalErrorResponse(boolean noInit) {} - + private ReportRSFatalErrorResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final ReportRSFatalErrorResponse defaultInstance; public static ReportRSFatalErrorResponse getDefaultInstance() { return defaultInstance; } - + public ReportRSFatalErrorResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ReportRSFatalErrorResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_ReportRSFatalErrorResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_ReportRSFatalErrorResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_ReportRSFatalErrorResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.class, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.Builder.class); + } + + public static 
com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public ReportRSFatalErrorResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ReportRSFatalErrorResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -2724,101 +3809,95 @@ public final class RegionServerStatusProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse) obj; - + boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static 
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code ReportRSFatalErrorResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponseOrBuilder { @@ -2826,18 +3905,21 @@ public final class RegionServerStatusProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_ReportRSFatalErrorResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_ReportRSFatalErrorResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_ReportRSFatalErrorResponse_fieldAccessorTable + 
.ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.class, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -2848,25 +3930,25 @@ public final class RegionServerStatusProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_ReportRSFatalErrorResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse build() { org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse result = buildPartial(); if (!result.isInitialized()) { @@ -2874,23 +3956,13 @@ public final class RegionServerStatusProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse(this); onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse)other); @@ -2899,102 +3971,187 @@ public final class RegionServerStatusProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - - + // @@protoc_insertion_point(builder_scope:ReportRSFatalErrorResponse) } - + static { defaultInstance = new ReportRSFatalErrorResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:ReportRSFatalErrorResponse) } - + public interface GetLastFlushedSequenceIdRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required bytes regionName = 1; + /** + * required bytes regionName = 1; + * + *
+     * <pre>
+     ** region name
+     * </pre>
+ */ boolean hasRegionName(); + /** + * required bytes regionName = 1; + * + *
+     * <pre>
+     ** region name
+     * </pre>
+ */ com.google.protobuf.ByteString getRegionName(); } + /** + * Protobuf type {@code GetLastFlushedSequenceIdRequest} + */ public static final class GetLastFlushedSequenceIdRequest extends com.google.protobuf.GeneratedMessage implements GetLastFlushedSequenceIdRequestOrBuilder { // Use GetLastFlushedSequenceIdRequest.newBuilder() to construct. - private GetLastFlushedSequenceIdRequest(Builder builder) { + private GetLastFlushedSequenceIdRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private GetLastFlushedSequenceIdRequest(boolean noInit) {} - + private GetLastFlushedSequenceIdRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final GetLastFlushedSequenceIdRequest defaultInstance; public static GetLastFlushedSequenceIdRequest getDefaultInstance() { return defaultInstance; } - + public GetLastFlushedSequenceIdRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private GetLastFlushedSequenceIdRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + regionName_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_GetLastFlushedSequenceIdRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_GetLastFlushedSequenceIdRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_GetLastFlushedSequenceIdRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest.class, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public GetLastFlushedSequenceIdRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new 
GetLastFlushedSequenceIdRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required bytes regionName = 1; public static final int REGIONNAME_FIELD_NUMBER = 1; private com.google.protobuf.ByteString regionName_; + /** + * required bytes regionName = 1; + * + *
+     * <pre>
+     ** region name
+     * </pre>
+ */ public boolean hasRegionName() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes regionName = 1; + * + *
+     * <pre>
+     ** region name
+     * </pre>
+ */ public com.google.protobuf.ByteString getRegionName() { return regionName_; } - + private void initFields() { regionName_ = com.google.protobuf.ByteString.EMPTY; } @@ -3002,7 +4159,7 @@ public final class RegionServerStatusProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasRegionName()) { memoizedIsInitialized = 0; return false; @@ -3010,7 +4167,7 @@ public final class RegionServerStatusProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -3019,12 +4176,12 @@ public final class RegionServerStatusProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -3034,14 +4191,14 @@ public final class RegionServerStatusProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -3051,7 +4208,7 @@ public final class RegionServerStatusProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest) obj; - + boolean result = true; result = result && (hasRegionName() == other.hasRegionName()); if (hasRegionName()) { @@ -3062,9 +4219,13 @@ public final class RegionServerStatusProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasRegionName()) { @@ -3072,89 +4233,79 @@ public final class RegionServerStatusProtos { hash = (53 * hash) + getRegionName().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static 
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code GetLastFlushedSequenceIdRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequestOrBuilder { @@ -3162,18 +4313,21 @@ public final class RegionServerStatusProtos { getDescriptor() { return 
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_GetLastFlushedSequenceIdRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_GetLastFlushedSequenceIdRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_GetLastFlushedSequenceIdRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest.class, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -3184,27 +4338,27 @@ public final class RegionServerStatusProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); regionName_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_GetLastFlushedSequenceIdRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest build() { org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest result = buildPartial(); if (!result.isInitialized()) { @@ -3212,17 +4366,7 @@ public final class RegionServerStatusProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest(this); int from_bitField0_ = bitField0_; @@ -3235,7 +4379,7 @@ public final class RegionServerStatusProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof 
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest)other); @@ -3244,7 +4388,7 @@ public final class RegionServerStatusProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest.getDefaultInstance()) return this; if (other.hasRegionName()) { @@ -3253,7 +4397,7 @@ public final class RegionServerStatusProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasRegionName()) { @@ -3261,49 +4405,55 @@ public final class RegionServerStatusProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - regionName_ = input.readBytes(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required bytes regionName = 1; private com.google.protobuf.ByteString regionName_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes regionName = 1; + * + *
+       * <pre>
+       ** region name
+       * </pre>
+ */ public boolean hasRegionName() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes regionName = 1; + * + *
+       * <pre>
+       ** region name
+       * </pre>
+ */ public com.google.protobuf.ByteString getRegionName() { return regionName_; } + /** + * required bytes regionName = 1; + * + *
+       * <pre>
+       ** region name
+       * </pre>
+ */ public Builder setRegionName(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -3313,70 +4463,172 @@ public final class RegionServerStatusProtos { onChanged(); return this; } + /** + * required bytes regionName = 1; + * + *
+       * <pre>
+       ** region name
+       * </pre>
+ */ public Builder clearRegionName() { bitField0_ = (bitField0_ & ~0x00000001); regionName_ = getDefaultInstance().getRegionName(); onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:GetLastFlushedSequenceIdRequest) } - + static { defaultInstance = new GetLastFlushedSequenceIdRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:GetLastFlushedSequenceIdRequest) } - + public interface GetLastFlushedSequenceIdResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required uint64 lastFlushedSequenceId = 1; + /** + * required uint64 lastFlushedSequenceId = 1; + * + *
+     * <pre>
+     ** the last HLog sequence id flushed from MemStore to HFile for the region
+     * </pre>
+ */ boolean hasLastFlushedSequenceId(); + /** + * required uint64 lastFlushedSequenceId = 1; + * + *
+     * <pre>
+     ** the last HLog sequence id flushed from MemStore to HFile for the region
+     * </pre>
+ */ long getLastFlushedSequenceId(); } + /** + * Protobuf type {@code GetLastFlushedSequenceIdResponse} + */ public static final class GetLastFlushedSequenceIdResponse extends com.google.protobuf.GeneratedMessage implements GetLastFlushedSequenceIdResponseOrBuilder { // Use GetLastFlushedSequenceIdResponse.newBuilder() to construct. - private GetLastFlushedSequenceIdResponse(Builder builder) { + private GetLastFlushedSequenceIdResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private GetLastFlushedSequenceIdResponse(boolean noInit) {} - + private GetLastFlushedSequenceIdResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final GetLastFlushedSequenceIdResponse defaultInstance; public static GetLastFlushedSequenceIdResponse getDefaultInstance() { return defaultInstance; } - + public GetLastFlushedSequenceIdResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private GetLastFlushedSequenceIdResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + lastFlushedSequenceId_ = input.readUInt64(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_GetLastFlushedSequenceIdResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_GetLastFlushedSequenceIdResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_GetLastFlushedSequenceIdResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse.class, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public GetLastFlushedSequenceIdResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new 
GetLastFlushedSequenceIdResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required uint64 lastFlushedSequenceId = 1; public static final int LASTFLUSHEDSEQUENCEID_FIELD_NUMBER = 1; private long lastFlushedSequenceId_; + /** + * required uint64 lastFlushedSequenceId = 1; + * + *
+     ** the last HLog sequence id flushed from MemStore to HFile for the region 
+     * 
+ */ public boolean hasLastFlushedSequenceId() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required uint64 lastFlushedSequenceId = 1; + * + *
+     ** the last HLog sequence id flushed from MemStore to HFile for the region 
+     * 
+ */ public long getLastFlushedSequenceId() { return lastFlushedSequenceId_; } - + private void initFields() { lastFlushedSequenceId_ = 0L; } @@ -3384,7 +4636,7 @@ public final class RegionServerStatusProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasLastFlushedSequenceId()) { memoizedIsInitialized = 0; return false; @@ -3392,7 +4644,7 @@ public final class RegionServerStatusProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -3401,12 +4653,12 @@ public final class RegionServerStatusProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -3416,14 +4668,14 @@ public final class RegionServerStatusProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -3433,7 +4685,7 @@ public final class RegionServerStatusProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse) obj; - + boolean result = true; result = result && (hasLastFlushedSequenceId() == other.hasLastFlushedSequenceId()); if (hasLastFlushedSequenceId()) { @@ -3444,9 +4696,13 @@ public final class RegionServerStatusProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasLastFlushedSequenceId()) { @@ -3454,89 +4710,79 @@ public final class RegionServerStatusProtos { hash = (53 * hash) + hashLong(getLastFlushedSequenceId()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static 
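
The block above replaces the 2.4-era parse path (newBuilder().mergeFrom(...).buildParsed()) with a single static PARSER that decodes in one pass, memoizes hashCode() on the immutable message, and keeps unknown fields on the message itself instead of a builder. A minimal caller-side sketch of the resulting API, assuming `data` holds a serialized response (the harness class here is hypothetical, not part of the patch):

import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse;

final class ParseSketch {
  // parseFrom(byte[]) now delegates to PARSER.parseFrom(data); it throws
  // InvalidProtocolBufferException both on corrupt bytes and when the
  // required lastFlushedSequenceId field is absent.
  static long lastFlushedId(byte[] data) throws InvalidProtocolBufferException {
    GetLastFlushedSequenceIdResponse resp =
        GetLastFlushedSequenceIdResponse.parseFrom(data);
    return resp.getLastFlushedSequenceId();
  }
}

One behavioral note: parseDelimitedFrom(InputStream) keeps its old contract of returning null at a clean end-of-stream, but that check now lives in the shared parser machinery rather than in per-message builder code.
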
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code GetLastFlushedSequenceIdResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponseOrBuilder { @@ -3544,18 +4790,21 @@ public final class RegionServerStatusProtos { getDescriptor() { return 
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_GetLastFlushedSequenceIdResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_GetLastFlushedSequenceIdResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_GetLastFlushedSequenceIdResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse.class, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -3566,27 +4815,27 @@ public final class RegionServerStatusProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); lastFlushedSequenceId_ = 0L; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_GetLastFlushedSequenceIdResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse build() { org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse result = buildPartial(); if (!result.isInitialized()) { @@ -3594,17 +4843,7 @@ public final class RegionServerStatusProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse(this); int from_bitField0_ = bitField0_; @@ -3617,7 +4856,7 @@ public final class RegionServerStatusProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof 
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse)other); @@ -3626,7 +4865,7 @@ public final class RegionServerStatusProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse.getDefaultInstance()) return this; if (other.hasLastFlushedSequenceId()) { @@ -3635,7 +4874,7 @@ public final class RegionServerStatusProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasLastFlushedSequenceId()) { @@ -3643,100 +4882,148 @@ public final class RegionServerStatusProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - lastFlushedSequenceId_ = input.readUInt64(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required uint64 lastFlushedSequenceId = 1; private long lastFlushedSequenceId_ ; + /** + * required uint64 lastFlushedSequenceId = 1; + * + *
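
Builder.mergeFrom(CodedInputStream, ExtensionRegistryLite) above no longer hand-rolls a tag-switch loop; it delegates to PARSER.parsePartialFrom and, in its finally block, merges whatever was decoded before a failure. Protobuf 2.5 carries that partial result on the exception (see setUnfinishedMessage in the parsing constructor earlier in this hunk), so callers can inspect it. A hedged sketch, with the corrupt input assumed:

import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.MessageLite;
import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse;

final class PartialParseSketch {
  static void inspect(byte[] corrupt) {
    try {
      GetLastFlushedSequenceIdResponse.parseFrom(corrupt);
    } catch (InvalidProtocolBufferException e) {
      // getUnfinishedMessage() may be null if nothing was decoded at all.
      MessageLite partial = e.getUnfinishedMessage();
      if (partial != null) {
        System.err.println("Fields decoded before failure: " + partial);
      }
    }
  }
}
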
+       ** the last HLog sequence id flushed from MemStore to HFile for the region 
+       * 
+ */ public boolean hasLastFlushedSequenceId() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required uint64 lastFlushedSequenceId = 1; + * + *
+       ** the last HLog sequence id flushed from MemStore to HFile for the region 
+       * 
+ */ public long getLastFlushedSequenceId() { return lastFlushedSequenceId_; } + /** + * required uint64 lastFlushedSequenceId = 1; + * + *
+       ** the last HLog sequence id flushed from MemStore to HFile for the region 
+       * 
+ */ public Builder setLastFlushedSequenceId(long value) { bitField0_ |= 0x00000001; lastFlushedSequenceId_ = value; onChanged(); return this; } + /** + * required uint64 lastFlushedSequenceId = 1; + * + *
+       ** the last HLog sequence id flushed from MemStore to HFile for the region 
+       * 
+ */ public Builder clearLastFlushedSequenceId() { bitField0_ = (bitField0_ & ~0x00000001); lastFlushedSequenceId_ = 0L; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:GetLastFlushedSequenceIdResponse) } - + static { defaultInstance = new GetLastFlushedSequenceIdResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:GetLastFlushedSequenceIdResponse) } - + + /** + * Protobuf service {@code RegionServerStatusService} + */ public static abstract class RegionServerStatusService implements com.google.protobuf.Service { protected RegionServerStatusService() {} - + public interface Interface { + /** + * rpc regionServerStartup(.RegionServerStartupRequest) returns (.RegionServerStartupResponse); + * + *
+       ** Called when a region server first starts. 
+       * 
+ */ public abstract void regionServerStartup( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc regionServerReport(.RegionServerReportRequest) returns (.RegionServerReportResponse); + * + *
+       ** Called to report the load the RegionServer is under. 
+       * 
+ */ public abstract void regionServerReport( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc reportRSFatalError(.ReportRSFatalErrorRequest) returns (.ReportRSFatalErrorResponse); + * + *
+       **
+       * Called by a region server to report a fatal error that is causing it to
+       * abort.
+       * 
+ */ public abstract void reportRSFatalError( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc getLastFlushedSequenceId(.GetLastFlushedSequenceIdRequest) returns (.GetLastFlushedSequenceIdResponse); + * + *
+       ** Called to get the sequence id of the last MemStore entry flushed to an
+       * HFile for a specified region. Used by the region server to speed up
+       * log splitting. 
+       * 
+ */ public abstract void getLastFlushedSequenceId( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest request, com.google.protobuf.RpcCallback done); - + } - + public static com.google.protobuf.Service newReflectiveService( final Interface impl) { return new RegionServerStatusService() { @@ -3747,7 +5034,7 @@ public final class RegionServerStatusProtos { com.google.protobuf.RpcCallback done) { impl.regionServerStartup(controller, request, done); } - + @java.lang.Override public void regionServerReport( com.google.protobuf.RpcController controller, @@ -3755,7 +5042,7 @@ public final class RegionServerStatusProtos { com.google.protobuf.RpcCallback done) { impl.regionServerReport(controller, request, done); } - + @java.lang.Override public void reportRSFatalError( com.google.protobuf.RpcController controller, @@ -3763,7 +5050,7 @@ public final class RegionServerStatusProtos { com.google.protobuf.RpcCallback done) { impl.reportRSFatalError(controller, request, done); } - + @java.lang.Override public void getLastFlushedSequenceId( com.google.protobuf.RpcController controller, @@ -3771,10 +5058,10 @@ public final class RegionServerStatusProtos { com.google.protobuf.RpcCallback done) { impl.getLastFlushedSequenceId(controller, request, done); } - + }; } - + public static com.google.protobuf.BlockingService newReflectiveBlockingService(final BlockingInterface impl) { return new com.google.protobuf.BlockingService() { @@ -3782,7 +5069,7 @@ public final class RegionServerStatusProtos { getDescriptorForType() { return getDescriptor(); } - + public final com.google.protobuf.Message callBlockingMethod( com.google.protobuf.Descriptors.MethodDescriptor method, com.google.protobuf.RpcController controller, @@ -3806,7 +5093,7 @@ public final class RegionServerStatusProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getRequestPrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -3828,7 +5115,7 @@ public final class RegionServerStatusProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getResponsePrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -3850,30 +5137,62 @@ public final class RegionServerStatusProtos { throw new java.lang.AssertionError("Can't get here."); } } - + }; } - + + /** + * rpc regionServerStartup(.RegionServerStartupRequest) returns (.RegionServerStartupResponse); + * + *
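
newReflectiveService above is unchanged in shape by this regeneration: it adapts a plain Interface implementation into a com.google.protobuf.Service by forwarding each RPC to the matching method. A minimal server-side sketch; the zero/empty responses are placeholders, not the master's real bookkeeping:

import com.google.protobuf.RpcCallback;
import com.google.protobuf.RpcController;
import com.google.protobuf.Service;
import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.*;

final class StatusServiceSketch implements RegionServerStatusService.Interface {
  public void regionServerStartup(RpcController c, RegionServerStartupRequest req,
      RpcCallback<RegionServerStartupResponse> done) {
    done.run(RegionServerStartupResponse.getDefaultInstance());
  }
  public void regionServerReport(RpcController c, RegionServerReportRequest req,
      RpcCallback<RegionServerReportResponse> done) {
    done.run(RegionServerReportResponse.getDefaultInstance());
  }
  public void reportRSFatalError(RpcController c, ReportRSFatalErrorRequest req,
      RpcCallback<ReportRSFatalErrorResponse> done) {
    done.run(ReportRSFatalErrorResponse.getDefaultInstance());
  }
  public void getLastFlushedSequenceId(RpcController c,
      GetLastFlushedSequenceIdRequest req,
      RpcCallback<GetLastFlushedSequenceIdResponse> done) {
    // Placeholder value; the required field must be set for a valid response.
    done.run(GetLastFlushedSequenceIdResponse.newBuilder()
        .setLastFlushedSequenceId(0L).build());
  }
  static Service asService() {
    return RegionServerStatusService.newReflectiveService(new StatusServiceSketch());
  }
}
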
+     ** Called when a region server first starts. 
+     * 
+ */ public abstract void regionServerStartup( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc regionServerReport(.RegionServerReportRequest) returns (.RegionServerReportResponse); + * + *
+     ** Called to report the load the RegionServer is under. 
+     * 
+ */ public abstract void regionServerReport( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc reportRSFatalError(.ReportRSFatalErrorRequest) returns (.ReportRSFatalErrorResponse); + * + *
+     **
+     * Called by a region server to report a fatal error that is causing it to
+     * abort.
+     * 
+ */ public abstract void reportRSFatalError( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc getLastFlushedSequenceId(.GetLastFlushedSequenceIdRequest) returns (.GetLastFlushedSequenceIdResponse); + * + *
+     ** Called to get the sequence id of the last MemStore entry flushed to an
+     * HFile for a specified region. Used by the region server to speed up
+     * log splitting. 
+     * 
+ */ public abstract void getLastFlushedSequenceId( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest request, com.google.protobuf.RpcCallback done); - + public static final com.google.protobuf.Descriptors.ServiceDescriptor getDescriptor() { @@ -3883,7 +5202,7 @@ public final class RegionServerStatusProtos { getDescriptorForType() { return getDescriptor(); } - + public final void callMethod( com.google.protobuf.Descriptors.MethodDescriptor method, com.google.protobuf.RpcController controller, @@ -3920,7 +5239,7 @@ public final class RegionServerStatusProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getRequestPrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -3942,7 +5261,7 @@ public final class RegionServerStatusProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getResponsePrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -3964,23 +5283,23 @@ public final class RegionServerStatusProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public static Stub newStub( com.google.protobuf.RpcChannel channel) { return new Stub(channel); } - + public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStatusService implements Interface { private Stub(com.google.protobuf.RpcChannel channel) { this.channel = channel; } - + private final com.google.protobuf.RpcChannel channel; - + public com.google.protobuf.RpcChannel getChannel() { return channel; } - + public void regionServerStartup( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest request, @@ -3995,7 +5314,7 @@ public final class RegionServerStatusProtos { org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.class, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.getDefaultInstance())); } - + public void regionServerReport( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest request, @@ -4010,7 +5329,7 @@ public final class RegionServerStatusProtos { org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.class, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.getDefaultInstance())); } - + public void reportRSFatalError( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest request, @@ -4025,7 +5344,7 @@ public final class RegionServerStatusProtos { org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.class, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.getDefaultInstance())); } - + public void getLastFlushedSequenceId( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest request, @@ -4041,41 +5360,41 @@ public final class RegionServerStatusProtos { org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse.getDefaultInstance())); } } - + public static 
BlockingInterface newBlockingStub( com.google.protobuf.BlockingRpcChannel channel) { return new BlockingStub(channel); } - + public interface BlockingInterface { public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse regionServerStartup( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse regionServerReport( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse reportRSFatalError( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse getLastFlushedSequenceId( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest request) throws com.google.protobuf.ServiceException; } - + private static final class BlockingStub implements BlockingInterface { private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { this.channel = channel; } - + private final com.google.protobuf.BlockingRpcChannel channel; - + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse regionServerStartup( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest request) @@ -4086,8 +5405,8 @@ public final class RegionServerStatusProtos { request, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse regionServerReport( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest request) @@ -4098,8 +5417,8 @@ public final class RegionServerStatusProtos { request, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse reportRSFatalError( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest request) @@ -4110,8 +5429,8 @@ public final class RegionServerStatusProtos { request, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse getLastFlushedSequenceId( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest request) @@ -4122,10 +5441,12 @@ public final class RegionServerStatusProtos { request, 
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse.getDefaultInstance()); } - + } + + // @@protoc_insertion_point(class_scope:RegionServerStatusService) } - + private static com.google.protobuf.Descriptors.Descriptor internal_static_RegionServerStartupRequest_descriptor; private static @@ -4166,7 +5487,7 @@ public final class RegionServerStatusProtos { private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_GetLastFlushedSequenceIdResponse_fieldAccessorTable; - + public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; @@ -4211,65 +5532,49 @@ public final class RegionServerStatusProtos { internal_static_RegionServerStartupRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_RegionServerStartupRequest_descriptor, - new java.lang.String[] { "Port", "ServerStartCode", "ServerCurrentTime", }, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.class, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.Builder.class); + new java.lang.String[] { "Port", "ServerStartCode", "ServerCurrentTime", }); internal_static_RegionServerStartupResponse_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_RegionServerStartupResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_RegionServerStartupResponse_descriptor, - new java.lang.String[] { "MapEntries", }, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.Builder.class); + new java.lang.String[] { "MapEntries", }); internal_static_RegionServerReportRequest_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_RegionServerReportRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_RegionServerReportRequest_descriptor, - new java.lang.String[] { "Server", "Load", }, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest.class, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest.Builder.class); + new java.lang.String[] { "Server", "Load", }); internal_static_RegionServerReportResponse_descriptor = getDescriptor().getMessageTypes().get(3); internal_static_RegionServerReportResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_RegionServerReportResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.Builder.class); + new java.lang.String[] { }); internal_static_ReportRSFatalErrorRequest_descriptor = getDescriptor().getMessageTypes().get(4); internal_static_ReportRSFatalErrorRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ReportRSFatalErrorRequest_descriptor, - new java.lang.String[] { "Server", "ErrorMessage", }, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest.class, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest.Builder.class); + new 
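
The blocking side mirrors this: newBlockingStub wraps a BlockingRpcChannel, and each method becomes a synchronous callBlockingMethod round trip. A caller-side sketch; how HBase obtains the channel and controller is environment-specific and assumed here, and regionName is taken to be the bytes field that the GetLastFlushedSequenceIdRequest descriptor table below lists:

import com.google.protobuf.BlockingRpcChannel;
import com.google.protobuf.ByteString;
import com.google.protobuf.RpcController;
import com.google.protobuf.ServiceException;
import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.*;

final class BlockingStubSketch {
  static long fetch(BlockingRpcChannel channel, RpcController controller,
      byte[] regionName) throws ServiceException {
    RegionServerStatusService.BlockingInterface stub =
        RegionServerStatusService.newBlockingStub(channel);
    GetLastFlushedSequenceIdResponse resp = stub.getLastFlushedSequenceId(
        controller,
        GetLastFlushedSequenceIdRequest.newBuilder()
            .setRegionName(ByteString.copyFrom(regionName))
            .build());
    return resp.getLastFlushedSequenceId();
  }
}
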
java.lang.String[] { "Server", "ErrorMessage", }); internal_static_ReportRSFatalErrorResponse_descriptor = getDescriptor().getMessageTypes().get(5); internal_static_ReportRSFatalErrorResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ReportRSFatalErrorResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.Builder.class); + new java.lang.String[] { }); internal_static_GetLastFlushedSequenceIdRequest_descriptor = getDescriptor().getMessageTypes().get(6); internal_static_GetLastFlushedSequenceIdRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_GetLastFlushedSequenceIdRequest_descriptor, - new java.lang.String[] { "RegionName", }, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest.class, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest.Builder.class); + new java.lang.String[] { "RegionName", }); internal_static_GetLastFlushedSequenceIdResponse_descriptor = getDescriptor().getMessageTypes().get(7); internal_static_GetLastFlushedSequenceIdResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_GetLastFlushedSequenceIdResponse_descriptor, - new java.lang.String[] { "LastFlushedSequenceId", }, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse.Builder.class); + new java.lang.String[] { "LastFlushedSequenceId", }); return null; } }; @@ -4279,6 +5584,6 @@ public final class RegionServerStatusProtos { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(), }, assigner); } - + // @@protoc_insertion_point(outer_class_scope) }
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RowProcessorProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RowProcessorProtos.java
index 162639e..47e37d2 100644
--- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RowProcessorProtos.java
+++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RowProcessorProtos.java
@@ -10,122 +10,255 @@ public final class RowProcessorProtos { } public interface RowProcessorRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required string rowProcessorClassName = 1; + /** + * required string rowProcessorClassName = 1; + */ boolean hasRowProcessorClassName(); - String getRowProcessorClassName(); - + /** + * required string rowProcessorClassName = 1; + */ + java.lang.String getRowProcessorClassName(); + /** + * required string rowProcessorClassName = 1; + */ + com.google.protobuf.ByteString + getRowProcessorClassNameBytes(); + // optional string rowProcessorInitializerMessageName = 2; + /** + * optional string rowProcessorInitializerMessageName = 2; + */ boolean hasRowProcessorInitializerMessageName(); - String getRowProcessorInitializerMessageName(); - + /** + * optional string rowProcessorInitializerMessageName = 2; + */ + java.lang.String getRowProcessorInitializerMessageName(); + /** + * optional string rowProcessorInitializerMessageName = 2; + */ + 
com.google.protobuf.ByteString + getRowProcessorInitializerMessageNameBytes(); + // optional bytes rowProcessorInitializerMessage = 3; + /** + * optional bytes rowProcessorInitializerMessage = 3; + */ boolean hasRowProcessorInitializerMessage(); + /** + * optional bytes rowProcessorInitializerMessage = 3; + */ com.google.protobuf.ByteString getRowProcessorInitializerMessage(); } + /** + * Protobuf type {@code RowProcessorRequest} + */ public static final class RowProcessorRequest extends com.google.protobuf.GeneratedMessage implements RowProcessorRequestOrBuilder { // Use RowProcessorRequest.newBuilder() to construct. - private RowProcessorRequest(Builder builder) { + private RowProcessorRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private RowProcessorRequest(boolean noInit) {} - + private RowProcessorRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final RowProcessorRequest defaultInstance; public static RowProcessorRequest getDefaultInstance() { return defaultInstance; } - + public RowProcessorRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private RowProcessorRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + rowProcessorClassName_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + rowProcessorInitializerMessageName_ = input.readBytes(); + break; + } + case 26: { + bitField0_ |= 0x00000004; + rowProcessorInitializerMessage_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_RowProcessorRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_RowProcessorRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_RowProcessorRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest.class, org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest.Builder.class); } - + + public static 
com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public RowProcessorRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RowProcessorRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + private int bitField0_; // required string rowProcessorClassName = 1; public static final int ROWPROCESSORCLASSNAME_FIELD_NUMBER = 1; private java.lang.Object rowProcessorClassName_; + /** + * required string rowProcessorClassName = 1; + */ public boolean hasRowProcessorClassName() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getRowProcessorClassName() { + /** + * required string rowProcessorClassName = 1; + */ + public java.lang.String getRowProcessorClassName() { java.lang.Object ref = rowProcessorClassName_; - if (ref instanceof String) { - return (String) ref; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { rowProcessorClassName_ = s; } return s; } } - private com.google.protobuf.ByteString getRowProcessorClassNameBytes() { + /** + * required string rowProcessorClassName = 1; + */ + public com.google.protobuf.ByteString + getRowProcessorClassNameBytes() { java.lang.Object ref = rowProcessorClassName_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); rowProcessorClassName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + // optional string rowProcessorInitializerMessageName = 2; public static final int ROWPROCESSORINITIALIZERMESSAGENAME_FIELD_NUMBER = 2; private java.lang.Object rowProcessorInitializerMessageName_; + /** + * optional string rowProcessorInitializerMessageName = 2; + */ public boolean hasRowProcessorInitializerMessageName() { return ((bitField0_ & 0x00000002) == 0x00000002); } - public String getRowProcessorInitializerMessageName() { + /** + * optional string rowProcessorInitializerMessageName = 2; + */ + public java.lang.String getRowProcessorInitializerMessageName() { java.lang.Object ref = rowProcessorInitializerMessageName_; - if (ref instanceof String) { - return (String) ref; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { rowProcessorInitializerMessageName_ = s; } return s; } } - private com.google.protobuf.ByteString getRowProcessorInitializerMessageNameBytes() { + /** + * optional string rowProcessorInitializerMessageName = 2; + */ + public com.google.protobuf.ByteString + getRowProcessorInitializerMessageNameBytes() { java.lang.Object ref = rowProcessorInitializerMessageName_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); 
+ com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); rowProcessorInitializerMessageName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + // optional bytes rowProcessorInitializerMessage = 3; public static final int ROWPROCESSORINITIALIZERMESSAGE_FIELD_NUMBER = 3; private com.google.protobuf.ByteString rowProcessorInitializerMessage_; + /** + * optional bytes rowProcessorInitializerMessage = 3; + */ public boolean hasRowProcessorInitializerMessage() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional bytes rowProcessorInitializerMessage = 3; + */ public com.google.protobuf.ByteString getRowProcessorInitializerMessage() { return rowProcessorInitializerMessage_; } - + private void initFields() { rowProcessorClassName_ = ""; rowProcessorInitializerMessageName_ = ""; @@ -135,7 +268,7 @@ public final class RowProcessorProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasRowProcessorClassName()) { memoizedIsInitialized = 0; return false; @@ -143,7 +276,7 @@ public final class RowProcessorProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -158,12 +291,12 @@ public final class RowProcessorProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -181,14 +314,14 @@ public final class RowProcessorProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -198,7 +331,7 @@ public final class RowProcessorProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest other = (org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest) obj; - + boolean result = true; result = result && (hasRowProcessorClassName() == other.hasRowProcessorClassName()); if (hasRowProcessorClassName()) { @@ -219,9 +352,13 @@ public final class RowProcessorProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasRowProcessorClassName()) { @@ -237,89 +374,79 @@ public final class RowProcessorProtos { hash = (53 * hash) + getRowProcessorInitializerMessage().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest parseFrom( com.google.protobuf.ByteString data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code RowProcessorRequest} + */ public static 
final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequestOrBuilder { @@ -327,18 +454,21 @@ public final class RowProcessorProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_RowProcessorRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_RowProcessorRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_RowProcessorRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest.class, org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -349,7 +479,7 @@ public final class RowProcessorProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); rowProcessorClassName_ = ""; @@ -360,20 +490,20 @@ public final class RowProcessorProtos { bitField0_ = (bitField0_ & ~0x00000004); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_RowProcessorRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest build() { org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest result = buildPartial(); if (!result.isInitialized()) { @@ -381,17 +511,7 @@ public final class RowProcessorProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest result = new org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest(this); int from_bitField0_ = bitField0_; @@ -412,7 +532,7 @@ public final class RowProcessorProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof 
org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest)other);
@@ -421,14 +541,18 @@ public final class RowProcessorProtos {
         return this;
       }
     }
-
+
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest.getDefaultInstance()) return this;
        if (other.hasRowProcessorClassName()) {
-         setRowProcessorClassName(other.getRowProcessorClassName());
+         bitField0_ |= 0x00000001;
+         rowProcessorClassName_ = other.rowProcessorClassName_;
+         onChanged();
        }
        if (other.hasRowProcessorInitializerMessageName()) {
-         setRowProcessorInitializerMessageName(other.getRowProcessorInitializerMessageName());
+         bitField0_ |= 0x00000002;
+         rowProcessorInitializerMessageName_ = other.rowProcessorInitializerMessageName_;
+         onChanged();
        }
        if (other.hasRowProcessorInitializerMessage()) {
          setRowProcessorInitializerMessage(other.getRowProcessorInitializerMessage());
@@ -436,7 +560,7 @@ public final class RowProcessorProtos {
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
-
+
      public final boolean isInitialized() {
        if (!hasRowProcessorClassName()) {
@@ -444,67 +568,69 @@ public final class RowProcessorProtos {
        }
        return true;
      }
-
+
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
-       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-         com.google.protobuf.UnknownFieldSet.newBuilder(
-           this.getUnknownFields());
-       while (true) {
-         int tag = input.readTag();
-         switch (tag) {
-           case 0:
-             this.setUnknownFields(unknownFields.build());
-             onChanged();
-             return this;
-           default: {
-             if (!parseUnknownField(input, unknownFields,
-                                    extensionRegistry, tag)) {
-               this.setUnknownFields(unknownFields.build());
-               onChanged();
-               return this;
-             }
-             break;
-           }
-           case 10: {
-             bitField0_ |= 0x00000001;
-             rowProcessorClassName_ = input.readBytes();
-             break;
-           }
-           case 18: {
-             bitField0_ |= 0x00000002;
-             rowProcessorInitializerMessageName_ = input.readBytes();
-             break;
-           }
-           case 26: {
-             bitField0_ |= 0x00000004;
-             rowProcessorInitializerMessage_ = input.readBytes();
-             break;
-           }
+       org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest parsedMessage = null;
+       try {
+         parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+         parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest) e.getUnfinishedMessage();
+         throw e;
+       } finally {
+         if (parsedMessage != null) {
+           mergeFrom(parsedMessage);
          }
        }
+       return this;
      }
-
      private int bitField0_;
-
+
      // required string rowProcessorClassName = 1;
      private java.lang.Object rowProcessorClassName_ = "";
+     /**
+      * required string rowProcessorClassName = 1;
+      */
      public boolean hasRowProcessorClassName() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
-     public String getRowProcessorClassName() {
+     /**
+      * required string rowProcessorClassName = 1;
+      */
+     public java.lang.String getRowProcessorClassName() {
        java.lang.Object ref = rowProcessorClassName_;
-       if (!(ref instanceof String)) {
-         String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
+       if (!(ref instanceof java.lang.String)) {
+         java.lang.String s = ((com.google.protobuf.ByteString) ref)
+             .toStringUtf8();
          rowProcessorClassName_ = s;
          return s;
        } else {
-         return (String) ref;
+         return (java.lang.String) ref;
        }
      }
-     public Builder setRowProcessorClassName(String value) {
+     /**
+      * required string rowProcessorClassName = 1;
+      */
+     public com.google.protobuf.ByteString
+         getRowProcessorClassNameBytes() {
+       java.lang.Object ref = rowProcessorClassName_;
+       if (ref instanceof String) {
+         com.google.protobuf.ByteString b =
+             com.google.protobuf.ByteString.copyFromUtf8(
+                 (java.lang.String) ref);
+         rowProcessorClassName_ = b;
+         return b;
+       } else {
+         return (com.google.protobuf.ByteString) ref;
+       }
+     }
+     /**
+      * required string rowProcessorClassName = 1;
+      */
+     public Builder setRowProcessorClassName(
+         java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
@@ -513,34 +639,72 @@ public final class RowProcessorProtos {
        onChanged();
        return this;
      }
+     /**
+      * required string rowProcessorClassName = 1;
+      */
      public Builder clearRowProcessorClassName() {
        bitField0_ = (bitField0_ & ~0x00000001);
        rowProcessorClassName_ = getDefaultInstance().getRowProcessorClassName();
        onChanged();
        return this;
      }
-     void setRowProcessorClassName(com.google.protobuf.ByteString value) {
-       bitField0_ |= 0x00000001;
+     /**
+      * required string rowProcessorClassName = 1;
+      */
+     public Builder setRowProcessorClassNameBytes(
+         com.google.protobuf.ByteString value) {
+       if (value == null) {
+         throw new NullPointerException();
+       }
+       bitField0_ |= 0x00000001;
        rowProcessorClassName_ = value;
        onChanged();
+       return this;
      }
-
+
      // optional string rowProcessorInitializerMessageName = 2;
      private java.lang.Object rowProcessorInitializerMessageName_ = "";
+     /**
+      * optional string rowProcessorInitializerMessageName = 2;
+      */
      public boolean hasRowProcessorInitializerMessageName() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
-     public String getRowProcessorInitializerMessageName() {
+     /**
+      * optional string rowProcessorInitializerMessageName = 2;
+      */
+     public java.lang.String getRowProcessorInitializerMessageName() {
        java.lang.Object ref = rowProcessorInitializerMessageName_;
-       if (!(ref instanceof String)) {
-         String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
+       if (!(ref instanceof java.lang.String)) {
+         java.lang.String s = ((com.google.protobuf.ByteString) ref)
+             .toStringUtf8();
          rowProcessorInitializerMessageName_ = s;
          return s;
        } else {
-         return (String) ref;
+         return (java.lang.String) ref;
+       }
+     }
+     /**
+      * optional string rowProcessorInitializerMessageName = 2;
+      */
+     public com.google.protobuf.ByteString
+         getRowProcessorInitializerMessageNameBytes() {
+       java.lang.Object ref = rowProcessorInitializerMessageName_;
+       if (ref instanceof String) {
+         com.google.protobuf.ByteString b =
+             com.google.protobuf.ByteString.copyFromUtf8(
+                 (java.lang.String) ref);
+         rowProcessorInitializerMessageName_ = b;
+         return b;
+       } else {
+         return (com.google.protobuf.ByteString) ref;
        }
      }
-     public Builder setRowProcessorInitializerMessageName(String value) {
+     /**
+      * optional string rowProcessorInitializerMessageName = 2;
+      */
+     public Builder setRowProcessorInitializerMessageName(
+         java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
@@ -549,26 +713,46 @@ public final class RowProcessorProtos {
        onChanged();
        return this;
      }
+     /**
+      * optional string rowProcessorInitializerMessageName = 2;
+      */
      public Builder clearRowProcessorInitializerMessageName() {
        bitField0_ = (bitField0_ & ~0x00000002);
        rowProcessorInitializerMessageName_ =
            getDefaultInstance().getRowProcessorInitializerMessageName();
        onChanged();
        return this;
      }
-     void setRowProcessorInitializerMessageName(com.google.protobuf.ByteString value) {
-       bitField0_ |= 0x00000002;
+     /**
+      * optional string rowProcessorInitializerMessageName = 2;
+      */
+     public Builder setRowProcessorInitializerMessageNameBytes(
+         com.google.protobuf.ByteString value) {
+       if (value == null) {
+         throw new NullPointerException();
+       }
+       bitField0_ |= 0x00000002;
        rowProcessorInitializerMessageName_ = value;
        onChanged();
+       return this;
      }
-
+
      // optional bytes rowProcessorInitializerMessage = 3;
      private com.google.protobuf.ByteString rowProcessorInitializerMessage_ = com.google.protobuf.ByteString.EMPTY;
+     /**
+      * optional bytes rowProcessorInitializerMessage = 3;
+      */
      public boolean hasRowProcessorInitializerMessage() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
+     /**
+      * optional bytes rowProcessorInitializerMessage = 3;
+      */
      public com.google.protobuf.ByteString getRowProcessorInitializerMessage() {
        return rowProcessorInitializerMessage_;
      }
+     /**
+      * optional bytes rowProcessorInitializerMessage = 3;
+      */
      public Builder setRowProcessorInitializerMessage(com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
@@ -578,70 +762,152 @@ public final class RowProcessorProtos {
        onChanged();
        return this;
      }
+     /**
+      * optional bytes rowProcessorInitializerMessage = 3;
+      */
      public Builder clearRowProcessorInitializerMessage() {
        bitField0_ = (bitField0_ & ~0x00000004);
        rowProcessorInitializerMessage_ = getDefaultInstance().getRowProcessorInitializerMessage();
        onChanged();
        return this;
      }
-
+
      // @@protoc_insertion_point(builder_scope:RowProcessorRequest)
    }
-
+
    static {
      defaultInstance = new RowProcessorRequest(true);
      defaultInstance.initFields();
    }
-
+
    // @@protoc_insertion_point(class_scope:RowProcessorRequest)
  }
-
+
  public interface RowProcessorResultOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
-
+
    // required bytes rowProcessorResult = 1;
+   /**
+    * required bytes rowProcessorResult = 1;
+    */
    boolean hasRowProcessorResult();
+   /**
+    * required bytes rowProcessorResult = 1;
+    */
    com.google.protobuf.ByteString getRowProcessorResult();
  }
+ /**
+  * Protobuf type {@code RowProcessorResult}
+  */
  public static final class RowProcessorResult extends
      com.google.protobuf.GeneratedMessage
      implements RowProcessorResultOrBuilder {
    // Use RowProcessorResult.newBuilder() to construct.
-   private RowProcessorResult(Builder builder) {
+   private RowProcessorResult(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
+     this.unknownFields = builder.getUnknownFields();
    }
-   private RowProcessorResult(boolean noInit) {}
-
+   private RowProcessorResult(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
    private static final RowProcessorResult defaultInstance;
    public static RowProcessorResult getDefaultInstance() {
      return defaultInstance;
    }
-
+
    public RowProcessorResult getDefaultInstanceForType() {
      return defaultInstance;
    }
-
+
+   private final com.google.protobuf.UnknownFieldSet unknownFields;
+   @java.lang.Override
+   public final com.google.protobuf.UnknownFieldSet
+       getUnknownFields() {
+     return this.unknownFields;
+   }
+   private RowProcessorResult(
+       com.google.protobuf.CodedInputStream input,
+       com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+       throws com.google.protobuf.InvalidProtocolBufferException {
+     initFields();
+     int mutable_bitField0_ = 0;
+     com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+         com.google.protobuf.UnknownFieldSet.newBuilder();
+     try {
+       boolean done = false;
+       while (!done) {
+         int tag = input.readTag();
+         switch (tag) {
+           case 0:
+             done = true;
+             break;
+           default: {
+             if (!parseUnknownField(input, unknownFields,
+                                    extensionRegistry, tag)) {
+               done = true;
+             }
+             break;
+           }
+           case 10: {
+             bitField0_ |= 0x00000001;
+             rowProcessorResult_ = input.readBytes();
+             break;
+           }
+         }
+       }
+     } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+       throw e.setUnfinishedMessage(this);
+     } catch (java.io.IOException e) {
+       throw new com.google.protobuf.InvalidProtocolBufferException(
+           e.getMessage()).setUnfinishedMessage(this);
+     } finally {
+       this.unknownFields = unknownFields.build();
+       makeExtensionsImmutable();
+     }
+   }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_RowProcessorResult_descriptor;
    }
-
+
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
-     return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_RowProcessorResult_fieldAccessorTable;
+     return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_RowProcessorResult_fieldAccessorTable
+         .ensureFieldAccessorsInitialized(
+             org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult.class, org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult.Builder.class);
    }
-
+
+   public static com.google.protobuf.Parser<RowProcessorResult> PARSER =
+       new com.google.protobuf.AbstractParser<RowProcessorResult>() {
+     public RowProcessorResult parsePartialFrom(
+         com.google.protobuf.CodedInputStream input,
+         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+         throws com.google.protobuf.InvalidProtocolBufferException {
+       return new RowProcessorResult(input, extensionRegistry);
+     }
+   };
+
+   @java.lang.Override
+   public com.google.protobuf.Parser<RowProcessorResult> getParserForType() {
+     return PARSER;
+   }
+
    private int bitField0_;
    // required bytes rowProcessorResult = 1;
    public static final int ROWPROCESSORRESULT_FIELD_NUMBER = 1;
    private com.google.protobuf.ByteString rowProcessorResult_;
+   /**
+    * required bytes rowProcessorResult = 1;
+    */
    public boolean hasRowProcessorResult() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
+   /**
+    * required bytes rowProcessorResult = 1;
+    */
    public com.google.protobuf.ByteString getRowProcessorResult() {
      return rowProcessorResult_;
    }
-
+
    private void initFields() {
      rowProcessorResult_ = com.google.protobuf.ByteString.EMPTY;
    }
@@ -649,7 +915,7 @@ public final class RowProcessorProtos {
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
-
+
      if (!hasRowProcessorResult()) {
        memoizedIsInitialized = 0;
        return false;
@@ -657,7 +923,7 @@ public final class RowProcessorProtos {
      memoizedIsInitialized = 1;
      return true;
    }
-
+
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
@@ -666,12 +932,12 @@ public final class RowProcessorProtos {
      }
      getUnknownFields().writeTo(output);
    }
-
+
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
-
+
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
@@ -681,14 +947,14 @@ public final class RowProcessorProtos {
      memoizedSerializedSize = size;
      return size;
    }
-
+
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
-
+
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
@@ -698,7 +964,7 @@ public final class RowProcessorProtos {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult other = (org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult) obj;
-
+
      boolean result = true;
      result = result && (hasRowProcessorResult() == other.hasRowProcessorResult());
      if (hasRowProcessorResult()) {
@@ -709,9 +975,13 @@ public final class RowProcessorProtos {
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
-
+
+   private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
+     if (memoizedHashCode != 0) {
+       return memoizedHashCode;
+     }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasRowProcessorResult()) {
@@ -719,89 +989,79 @@ public final class RowProcessorProtos {
        hash = (53 * hash) + getRowProcessorResult().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
+     memoizedHashCode = hash;
      return hash;
    }
-
+
    public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
-     return newBuilder().mergeFrom(data).buildParsed();
+     return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
-     return newBuilder().mergeFrom(data, extensionRegistry)
-              .buildParsed();
+     return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
-     return newBuilder().mergeFrom(data).buildParsed();
+     return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
-     return newBuilder().mergeFrom(data, extensionRegistry)
-              .buildParsed();
+     return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult parseFrom(java.io.InputStream input)
        throws java.io.IOException {
-     return newBuilder().mergeFrom(input).buildParsed();
+     return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-     return newBuilder().mergeFrom(input, extensionRegistry)
-              .buildParsed();
+     return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
-     Builder builder = newBuilder();
-     if (builder.mergeDelimitedFrom(input)) {
-       return builder.buildParsed();
-     } else {
-       return null;
-     }
+     return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-     Builder builder = newBuilder();
-     if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
-       return builder.buildParsed();
-     } else {
-       return null;
-     }
+     return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
-     return newBuilder().mergeFrom(input).buildParsed();
+     return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-     return newBuilder().mergeFrom(input, extensionRegistry)
-              .buildParsed();
+     return PARSER.parseFrom(input, extensionRegistry);
    }
-
+
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
-
+
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
+   /**
+    * Protobuf type {@code RowProcessorResult}
+    */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResultOrBuilder {
@@ -809,18 +1069,21 @@ public final class RowProcessorProtos {
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_RowProcessorResult_descriptor;
      }
-
+
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
-       return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_RowProcessorResult_fieldAccessorTable;
+       return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_RowProcessorResult_fieldAccessorTable
+           .ensureFieldAccessorsInitialized(
+               org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult.class, org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult.Builder.class);
      }
-
+
      // Construct using org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
-
-     private Builder(BuilderParent parent) {
+
+     private Builder(
+         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
@@ -831,27 +1094,27 @@ public final class RowProcessorProtos {
      private static Builder create() {
        return new Builder();
      }
-
+
      public Builder clear() {
        super.clear();
        rowProcessorResult_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
-
+
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
-
+
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
-       return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult.getDescriptor();
+       return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_RowProcessorResult_descriptor;
      }
-
+
      public org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult.getDefaultInstance();
      }
-
+
      public org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult build() {
        org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult result = buildPartial();
        if (!result.isInitialized()) {
@@ -859,17 +1122,7 @@ public final class RowProcessorProtos {
        }
        return result;
      }
-
-     private org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult buildParsed()
-         throws com.google.protobuf.InvalidProtocolBufferException {
-       org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult result = buildPartial();
-       if (!result.isInitialized()) {
-         throw newUninitializedMessageException(
-           result).asInvalidProtocolBufferException();
-       }
-       return result;
-     }
-
+
      public org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult result = new org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult(this);
        int from_bitField0_ = bitField0_;
@@ -882,7 +1135,7 @@ public final class RowProcessorProtos {
        onBuilt();
        return result;
      }
-
+
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult)other);
@@ -891,7 +1144,7 @@ public final class RowProcessorProtos {
          return this;
        }
      }
-
+
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult.getDefaultInstance()) return this;
        if (other.hasRowProcessorResult()) {
@@ -900,7 +1153,7 @@ public final class RowProcessorProtos {
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
-
+
      public final boolean isInitialized() {
       if (!hasRowProcessorResult()) {
@@ -908,49 +1161,43 @@ public final class RowProcessorProtos {
        }
        return true;
      }
-
+
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
-       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-         com.google.protobuf.UnknownFieldSet.newBuilder(
-           this.getUnknownFields());
-       while (true) {
-         int tag = input.readTag();
-         switch (tag) {
-           case 0:
-             this.setUnknownFields(unknownFields.build());
-             onChanged();
-             return this;
-           default: {
-             if (!parseUnknownField(input, unknownFields,
-                                    extensionRegistry, tag)) {
-               this.setUnknownFields(unknownFields.build());
-               onChanged();
-               return this;
-             }
-             break;
-           }
-           case 10: {
-             bitField0_ |= 0x00000001;
-             rowProcessorResult_ = input.readBytes();
-             break;
-           }
+       org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult parsedMessage = null;
+       try {
+         parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+         parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult) e.getUnfinishedMessage();
+         throw e;
+       } finally {
+         if (parsedMessage != null) {
+           mergeFrom(parsedMessage);
          }
        }
+       return this;
      }
-
      private int bitField0_;
-
+
      // required bytes rowProcessorResult = 1;
      private com.google.protobuf.ByteString rowProcessorResult_ = com.google.protobuf.ByteString.EMPTY;
+     /**
+      * required bytes rowProcessorResult = 1;
+      */
      public boolean hasRowProcessorResult() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
+     /**
+      * required bytes rowProcessorResult = 1;
+      */
      public com.google.protobuf.ByteString getRowProcessorResult() {
        return rowProcessorResult_;
      }
+     /**
+      * required bytes rowProcessorResult = 1;
+      */
      public Builder setRowProcessorResult(com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
@@ -960,36 +1207,45 @@ public final class RowProcessorProtos {
        onChanged();
        return this;
      }
+     /**
+      * required bytes rowProcessorResult = 1;
+      */
      public Builder clearRowProcessorResult() {
        bitField0_ = (bitField0_ & ~0x00000001);
        rowProcessorResult_ = getDefaultInstance().getRowProcessorResult();
        onChanged();
        return this;
      }
-
+
      // @@protoc_insertion_point(builder_scope:RowProcessorResult)
    }
-
+
    static {
      defaultInstance = new RowProcessorResult(true);
      defaultInstance.initFields();
    }
-
+
    // @@protoc_insertion_point(class_scope:RowProcessorResult)
  }
-
+
+ /**
+  * Protobuf service {@code RowProcessorService}
+  */
  public static abstract class RowProcessorService
      implements com.google.protobuf.Service {
    protected RowProcessorService() {}
-
+
    public interface Interface {
+     /**
+      * rpc process(.RowProcessorRequest) returns (.RowProcessorResult);
+      */
      public abstract void process(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult> done);
-
+
    }
-
+
    public static com.google.protobuf.Service newReflectiveService(
        final Interface impl) {
      return new RowProcessorService() {
@@ -1000,10 +1256,10 @@ public final class RowProcessorProtos {
            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult> done) {
          impl.process(controller, request, done);
        }
-
+
      };
    }
-
+
    public static com.google.protobuf.BlockingService
        newReflectiveBlockingService(final BlockingInterface impl) {
      return new com.google.protobuf.BlockingService() {
@@ -1011,7 +1267,7 @@ public final class RowProcessorProtos {
            getDescriptorForType() {
          return getDescriptor();
        }
-
+
        public final com.google.protobuf.Message callBlockingMethod(
            com.google.protobuf.Descriptors.MethodDescriptor method,
            com.google.protobuf.RpcController controller,
@@ -1029,7 +1285,7 @@ public final class RowProcessorProtos {
            throw new java.lang.AssertionError("Can't get here.");
          }
        }
-
+
        public final com.google.protobuf.Message
            getRequestPrototype(
            com.google.protobuf.Descriptors.MethodDescriptor method) {
@@ -1045,7 +1301,7 @@ public final class RowProcessorProtos {
            throw new java.lang.AssertionError("Can't get here.");
          }
        }
-
+
        public final com.google.protobuf.Message
            getResponsePrototype(
            com.google.protobuf.Descriptors.MethodDescriptor method) {
@@ -1061,15 +1317,18 @@ public final class RowProcessorProtos {
            throw new java.lang.AssertionError("Can't get here.");
          }
        }
-
+
      };
    }
-
+
+   /**
+    * rpc process(.RowProcessorRequest) returns (.RowProcessorResult);
+    */
    public abstract void process(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult> done);
-
+
    public static final
        com.google.protobuf.Descriptors.ServiceDescriptor
        getDescriptor() {
@@ -1079,7 +1338,7 @@ public final class RowProcessorProtos {
        getDescriptorForType() {
      return getDescriptor();
    }
-
+
    public final void callMethod(
        com.google.protobuf.Descriptors.MethodDescriptor method,
        com.google.protobuf.RpcController controller,
@@ -1101,7 +1360,7 @@ public final class RowProcessorProtos {
        throw new java.lang.AssertionError("Can't get here.");
      }
    }
-
+
    public final com.google.protobuf.Message
        getRequestPrototype(
        com.google.protobuf.Descriptors.MethodDescriptor method) {
@@ -1117,7 +1376,7 @@ public final class RowProcessorProtos {
        throw new java.lang.AssertionError("Can't get here.");
      }
    }
-
+
    public final com.google.protobuf.Message
        getResponsePrototype(
        com.google.protobuf.Descriptors.MethodDescriptor method) {
@@ -1133,23 +1392,23 @@ public final class RowProcessorProtos {
        throw new java.lang.AssertionError("Can't get here.");
      }
    }
-
+
    public static Stub newStub(
        com.google.protobuf.RpcChannel channel) {
      return new Stub(channel);
    }
-
+
    public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorService implements Interface {
      private Stub(com.google.protobuf.RpcChannel channel) {
        this.channel = channel;
      }
-
+
      private final com.google.protobuf.RpcChannel channel;
-
+
      public com.google.protobuf.RpcChannel getChannel() {
        return channel;
      }
-
+
      public void process(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest request,
@@ -1165,26 +1424,26 @@ public final class RowProcessorProtos {
            org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult.getDefaultInstance()));
      }
    }
-
+
    public static BlockingInterface newBlockingStub(
        com.google.protobuf.BlockingRpcChannel channel) {
      return new BlockingStub(channel);
    }
-
+
    public interface BlockingInterface {
      public org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult process(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest request)
          throws com.google.protobuf.ServiceException;
    }
-
+
    private static final class BlockingStub implements BlockingInterface {
      private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
        this.channel = channel;
      }
-
+
      private final com.google.protobuf.BlockingRpcChannel channel;
-
+
      public org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult process(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest request)
@@ -1195,10 +1454,12 @@ public final class RowProcessorProtos {
          request,
          org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult.getDefaultInstance());
      }
-
+
    }
+
+   // @@protoc_insertion_point(class_scope:RowProcessorService)
  }
-
+
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_RowProcessorRequest_descriptor;
  private static
@@ -1209,7 +1470,7 @@ public final class RowProcessorProtos {
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_RowProcessorResult_fieldAccessorTable;
-
+
  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
@@ -1238,17 +1499,13 @@ public final class RowProcessorProtos {
      internal_static_RowProcessorRequest_fieldAccessorTable = new
        com.google.protobuf.GeneratedMessage.FieldAccessorTable(
          internal_static_RowProcessorRequest_descriptor,
-         new java.lang.String[] { "RowProcessorClassName", "RowProcessorInitializerMessageName", "RowProcessorInitializerMessage", },
-         org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest.class,
-         org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest.Builder.class);
+         new java.lang.String[] { "RowProcessorClassName", "RowProcessorInitializerMessageName", "RowProcessorInitializerMessage", });
      internal_static_RowProcessorResult_descriptor =
        getDescriptor().getMessageTypes().get(1);
      internal_static_RowProcessorResult_fieldAccessorTable = new
        com.google.protobuf.GeneratedMessage.FieldAccessorTable(
          internal_static_RowProcessorResult_descriptor,
-         new java.lang.String[] { "RowProcessorResult", },
-         org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult.class,
-         org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult.Builder.class);
+         new java.lang.String[] { "RowProcessorResult", });
      return null;
    }
  };
@@ -1257,6 +1514,6 @@ public final class RowProcessorProtos {
      new com.google.protobuf.Descriptors.FileDescriptor[] {
      }, assigner);
  }
-
+
  // @@protoc_insertion_point(outer_class_scope)
}
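The RowProcessorProtos regeneration above follows one pattern throughout: the static parseFrom(...) overloads stop routing through newBuilder().mergeFrom(...).buildParsed() and instead delegate to the new per-message PARSER object, and Builder.mergeFrom(CodedInputStream, ...) collapses to a single PARSER.parsePartialFrom(...) call. A minimal sketch of what this looks like from calling code, under the protobuf 2.5 API this patch targets (responseBytes is a hypothetical placeholder for any serialized RowProcessorResult, not something defined in this patch):

  import com.google.protobuf.InvalidProtocolBufferException;
  import org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult;

  public class ParserUsageSketch {
    static RowProcessorResult parse(byte[] responseBytes)
        throws InvalidProtocolBufferException {
      // Both entry points now route through the generated PARSER singleton;
      // the static overload is just a thin wrapper after this regeneration.
      RowProcessorResult viaStatic = RowProcessorResult.parseFrom(responseBytes);
      RowProcessorResult viaParser = RowProcessorResult.PARSER.parseFrom(responseBytes);
      assert viaStatic.equals(viaParser);
      return viaStatic;
    }
  }

One consequence visible in the hunks: a truncated or invalid payload now surfaces as an InvalidProtocolBufferException carrying the partially parsed message (e.getUnfinishedMessage()), rather than being silently merged into a builder.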
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SecureBulkLoadProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SecureBulkLoadProtos.java
index 2766a7b..a2463f0 100644
--- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SecureBulkLoadProtos.java
+++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SecureBulkLoadProtos.java
@@ -10,135 +10,313 @@ public final class SecureBulkLoadProtos {
 }
 public interface SecureBulkLoadHFilesRequestOrBuilder
     extends com.google.protobuf.MessageOrBuilder {
-
+
   // repeated .BulkLoadHFileRequest.FamilyPath familyPath = 1;
+  /**
+   * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 1;
+   */
   java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath>
       getFamilyPathList();
+  /**
+   * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 1;
+   */
   org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index);
+  /**
+   * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 1;
+   */
   int getFamilyPathCount();
+  /**
+   * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 1;
+   */
   java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder>
       getFamilyPathOrBuilderList();
+  /**
+   * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 1;
+   */
   org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder(
       int index);
-
+
   // optional bool assignSeqNum = 2;
+  /**
+   * optional bool assignSeqNum = 2;
+   */
   boolean hasAssignSeqNum();
+  /**
+   * optional bool assignSeqNum = 2;
+   */
   boolean getAssignSeqNum();
-
+
   // required .DelegationTokenProto fsToken = 3;
+  /**
+   * required .DelegationTokenProto fsToken = 3;
+   */
   boolean hasFsToken();
+  /**
+   * required .DelegationTokenProto fsToken = 3;
+   */
   org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto getFsToken();
+  /**
+   * required .DelegationTokenProto fsToken = 3;
+   */
   org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProtoOrBuilder getFsTokenOrBuilder();
-
+
   // required string bulkToken = 4;
+  /**
+   * required string bulkToken = 4;
+   */
   boolean hasBulkToken();
-  String getBulkToken();
+  /**
+   * required string bulkToken = 4;
+   */
+  java.lang.String getBulkToken();
+  /**
+   * required string bulkToken = 4;
+   */
+  com.google.protobuf.ByteString
+      getBulkTokenBytes();
 }
+ /**
+  * Protobuf type {@code SecureBulkLoadHFilesRequest}
+  */
 public static final class SecureBulkLoadHFilesRequest extends
     com.google.protobuf.GeneratedMessage
     implements SecureBulkLoadHFilesRequestOrBuilder {
   // Use SecureBulkLoadHFilesRequest.newBuilder() to construct.
-  private SecureBulkLoadHFilesRequest(Builder builder) {
+  private SecureBulkLoadHFilesRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
     super(builder);
+    this.unknownFields = builder.getUnknownFields();
   }
-  private SecureBulkLoadHFilesRequest(boolean noInit) {}
-
+  private SecureBulkLoadHFilesRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
   private static final SecureBulkLoadHFilesRequest defaultInstance;
   public static SecureBulkLoadHFilesRequest getDefaultInstance() {
     return defaultInstance;
   }
-
+
   public SecureBulkLoadHFilesRequest getDefaultInstanceForType() {
     return defaultInstance;
   }
-
+
+  private final com.google.protobuf.UnknownFieldSet unknownFields;
+  @java.lang.Override
+  public final com.google.protobuf.UnknownFieldSet
+      getUnknownFields() {
+    return this.unknownFields;
+  }
+  private SecureBulkLoadHFilesRequest(
+      com.google.protobuf.CodedInputStream input,
+      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+      throws com.google.protobuf.InvalidProtocolBufferException {
+    initFields();
+    int mutable_bitField0_ = 0;
+    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+        com.google.protobuf.UnknownFieldSet.newBuilder();
+    try {
+      boolean done = false;
+      while (!done) {
+        int tag = input.readTag();
+        switch (tag) {
+          case 0:
+            done = true;
+            break;
+          default: {
+            if (!parseUnknownField(input, unknownFields,
+                                   extensionRegistry, tag)) {
+              done = true;
+            }
+            break;
+          }
+          case 10: {
+            if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+              familyPath_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath>();
+              mutable_bitField0_ |= 0x00000001;
+            }
+            familyPath_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.PARSER, extensionRegistry));
+            break;
+          }
+          case 16: {
+            bitField0_ |= 0x00000001;
+            assignSeqNum_ = input.readBool();
+            break;
+          }
+          case 26: {
+            org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.Builder subBuilder = null;
+            if (((bitField0_ & 0x00000002) == 0x00000002)) {
+              subBuilder = fsToken_.toBuilder();
+            }
+            fsToken_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.PARSER, extensionRegistry);
+            if (subBuilder != null) {
+              subBuilder.mergeFrom(fsToken_);
+              fsToken_ = subBuilder.buildPartial();
+            }
+            bitField0_ |= 0x00000002;
+            break;
+          }
+          case 34: {
+            bitField0_ |= 0x00000004;
+            bulkToken_ = input.readBytes();
+            break;
+          }
+        }
+      }
+    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+      throw e.setUnfinishedMessage(this);
+    } catch (java.io.IOException e) {
+      throw new com.google.protobuf.InvalidProtocolBufferException(
+          e.getMessage()).setUnfinishedMessage(this);
+    } finally {
+      if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+        familyPath_ = java.util.Collections.unmodifiableList(familyPath_);
+      }
+      this.unknownFields = unknownFields.build();
+      makeExtensionsImmutable();
+    }
+  }
   public static final com.google.protobuf.Descriptors.Descriptor
       getDescriptor() {
     return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_SecureBulkLoadHFilesRequest_descriptor;
   }
-
+
   protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
       internalGetFieldAccessorTable() {
-    return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_SecureBulkLoadHFilesRequest_fieldAccessorTable;
+    return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_SecureBulkLoadHFilesRequest_fieldAccessorTable
+        .ensureFieldAccessorsInitialized(
+            org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest.class, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest.Builder.class);
   }
-
+
+  public static com.google.protobuf.Parser<SecureBulkLoadHFilesRequest> PARSER =
+      new com.google.protobuf.AbstractParser<SecureBulkLoadHFilesRequest>() {
+    public SecureBulkLoadHFilesRequest parsePartialFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return new SecureBulkLoadHFilesRequest(input, extensionRegistry);
+    }
+  };
+
+  @java.lang.Override
+  public com.google.protobuf.Parser<SecureBulkLoadHFilesRequest> getParserForType() {
+    return PARSER;
+  }
+
   private int bitField0_;
   // repeated .BulkLoadHFileRequest.FamilyPath familyPath = 1;
   public static final int FAMILYPATH_FIELD_NUMBER = 1;
   private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath> familyPath_;
+  /**
+   * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 1;
+   */
   public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath> getFamilyPathList() {
     return familyPath_;
   }
+  /**
+   * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 1;
+   */
   public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder>
       getFamilyPathOrBuilderList() {
     return familyPath_;
   }
+  /**
+   * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 1;
+   */
   public int getFamilyPathCount() {
     return familyPath_.size();
   }
+  /**
+   * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 1;
+   */
   public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index) {
     return familyPath_.get(index);
   }
+  /**
+   * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 1;
+   */
   public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder(
       int index) {
     return familyPath_.get(index);
   }
-
+
   // optional bool assignSeqNum = 2;
   public static final int ASSIGNSEQNUM_FIELD_NUMBER = 2;
   private boolean assignSeqNum_;
+  /**
+   * optional bool assignSeqNum = 2;
+   */
   public boolean hasAssignSeqNum() {
     return ((bitField0_ & 0x00000001) == 0x00000001);
   }
+  /**
+   * optional bool assignSeqNum = 2;
+   */
   public boolean getAssignSeqNum() {
     return assignSeqNum_;
   }
-
+
   // required .DelegationTokenProto fsToken = 3;
   public static final int FSTOKEN_FIELD_NUMBER = 3;
   private org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto fsToken_;
+  /**
+   * required .DelegationTokenProto fsToken = 3;
+   */
   public boolean hasFsToken() {
     return ((bitField0_ & 0x00000002) == 0x00000002);
   }
+  /**
+   * required .DelegationTokenProto fsToken = 3;
+   */
   public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto getFsToken() {
     return fsToken_;
   }
+  /**
+   * required .DelegationTokenProto fsToken = 3;
+   */
   public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProtoOrBuilder getFsTokenOrBuilder() {
     return fsToken_;
   }
-
+
   // required string bulkToken = 4;
   public static final int BULKTOKEN_FIELD_NUMBER = 4;
   private java.lang.Object bulkToken_;
+  /**
+   * required string bulkToken = 4;
+   */
   public boolean hasBulkToken() {
     return ((bitField0_ & 0x00000004) == 0x00000004);
   }
-  public String getBulkToken() {
+  /**
+   * required string bulkToken = 4;
+   */
+  public java.lang.String getBulkToken() {
     java.lang.Object ref = bulkToken_;
-    if (ref instanceof String) {
-      return (String) ref;
+    if (ref instanceof java.lang.String) {
+      return (java.lang.String) ref;
     } else {
       com.google.protobuf.ByteString bs =
           (com.google.protobuf.ByteString) ref;
-      String s = bs.toStringUtf8();
-      if (com.google.protobuf.Internal.isValidUtf8(bs)) {
+      java.lang.String s = bs.toStringUtf8();
+      if (bs.isValidUtf8()) {
        bulkToken_ = s;
      }
      return s;
    }
  }
-  private com.google.protobuf.ByteString getBulkTokenBytes() {
+  /**
+   * required string bulkToken = 4;
+   */
+  public com.google.protobuf.ByteString
+      getBulkTokenBytes() {
    java.lang.Object ref = bulkToken_;
-    if (ref instanceof String) {
+    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
-          com.google.protobuf.ByteString.copyFromUtf8((String) ref);
+          com.google.protobuf.ByteString.copyFromUtf8(
+              (java.lang.String) ref);
      bulkToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
-
+
  private void initFields() {
    familyPath_ = java.util.Collections.emptyList();
    assignSeqNum_ = false;
@@ -149,7 +327,7 @@ public final class SecureBulkLoadProtos {
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized != -1) return isInitialized == 1;
-
+
    if (!hasFsToken()) {
      memoizedIsInitialized = 0;
      return false;
@@ -167,7 +345,7 @@ public final class SecureBulkLoadProtos {
    memoizedIsInitialized = 1;
    return true;
  }
-
+
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    getSerializedSize();
@@ -185,12 +363,12 @@ public final class SecureBulkLoadProtos {
    }
    getUnknownFields().writeTo(output);
  }
-
+
  private int memoizedSerializedSize = -1;
  public int getSerializedSize() {
    int size = memoizedSerializedSize;
    if (size != -1) return size;
-
+
    size = 0;
    for (int i = 0; i < familyPath_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream
@@ -212,14 +390,14 @@ public final class SecureBulkLoadProtos {
    memoizedSerializedSize = size;
    return size;
  }
-
+
  private static final long serialVersionUID = 0L;
  @java.lang.Override
  protected java.lang.Object writeReplace()
      throws java.io.ObjectStreamException {
    return super.writeReplace();
  }
-
+
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
@@ -229,7 +407,7 @@ public final class SecureBulkLoadProtos {
      return super.equals(obj);
    }
    org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest other = (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest) obj;
-
+
    boolean result = true;
    result = result && getFamilyPathList()
        .equals(other.getFamilyPathList());
@@ -252,9 +430,13 @@ public final class SecureBulkLoadProtos {
        getUnknownFields().equals(other.getUnknownFields());
    return result;
  }
-
+
+  private int memoizedHashCode = 0;
  @java.lang.Override
  public int hashCode() {
+    if (memoizedHashCode != 0) {
+      return memoizedHashCode;
+    }
    int hash = 41;
    hash = (19 * hash) + getDescriptorForType().hashCode();
    if (getFamilyPathCount() > 0) {
@@ -274,89 +456,79 @@ public final class SecureBulkLoadProtos {
      hash = (53 * hash) + getBulkToken().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
+    memoizedHashCode = hash;
    return hash;
  }
-
+
  public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
-    return newBuilder().mergeFrom(data).buildParsed();
+    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
-    return newBuilder().mergeFrom(data, extensionRegistry)
-             .buildParsed();
+    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
-    return newBuilder().mergeFrom(data).buildParsed();
+    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
-    return newBuilder().mergeFrom(data, extensionRegistry)
-             .buildParsed();
+    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parseFrom(java.io.InputStream input)
      throws java.io.IOException {
-    return newBuilder().mergeFrom(input).buildParsed();
+    return PARSER.parseFrom(input);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
-    return newBuilder().mergeFrom(input, extensionRegistry)
-             .buildParsed();
+    return PARSER.parseFrom(input, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
-    Builder builder = newBuilder();
-    if (builder.mergeDelimitedFrom(input)) {
-      return builder.buildParsed();
-    } else {
-      return null;
-    }
+    return PARSER.parseDelimitedFrom(input);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
-    Builder builder = newBuilder();
-    if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
-      return builder.buildParsed();
-    } else {
-      return null;
-    }
+    return PARSER.parseDelimitedFrom(input, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
-    return newBuilder().mergeFrom(input).buildParsed();
+    return PARSER.parseFrom(input);
  }
  public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
-    return newBuilder().mergeFrom(input, extensionRegistry)
-             .buildParsed();
+    return PARSER.parseFrom(input, extensionRegistry);
  }
-
+
  public static Builder newBuilder() { return Builder.create(); }
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest prototype) {
    return newBuilder().mergeFrom(prototype);
  }
  public Builder toBuilder() { return newBuilder(this); }
-
+
  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessage.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
+  /**
+   * Protobuf type {@code SecureBulkLoadHFilesRequest}
+   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessage.Builder<Builder>
     implements org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequestOrBuilder {
@@ -364,18 +536,21 @@ public final class SecureBulkLoadProtos {
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_SecureBulkLoadHFilesRequest_descriptor;
    }
-
+
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
-     return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_SecureBulkLoadHFilesRequest_fieldAccessorTable;
+     return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_SecureBulkLoadHFilesRequest_fieldAccessorTable
+         .ensureFieldAccessorsInitialized(
+             org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest.class, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest.Builder.class);
    }
-
+
    // Construct using org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
-
-    private Builder(BuilderParent parent) {
+
+    private Builder(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
@@ -388,7 +563,7 @@ public final class SecureBulkLoadProtos {
    private static Builder create() {
      return new Builder();
    }
-
+
    public Builder clear() {
      super.clear();
      if (familyPathBuilder_ == null) {
@@ -409,20 +584,20 @@ public final class SecureBulkLoadProtos {
      bitField0_ = (bitField0_ & ~0x00000008);
      return this;
    }
-
+
    public Builder clone() {
      return create().mergeFrom(buildPartial());
    }
-
+
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
-     return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest.getDescriptor();
+     return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_SecureBulkLoadHFilesRequest_descriptor;
    }
-
+
    public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest getDefaultInstanceForType() {
      return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest.getDefaultInstance();
    }
-
+
    public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest build() {
      org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest result = buildPartial();
      if (!result.isInitialized()) {
@@ -430,17 +605,7 @@ public final class SecureBulkLoadProtos {
      }
      return result;
    }
-
-    private org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest buildParsed()
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest result = buildPartial();
-      if (!result.isInitialized()) {
-        throw newUninitializedMessageException(
-          result).asInvalidProtocolBufferException();
-      }
-      return result;
-    }
-
+
    public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest buildPartial() {
      org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest result = new org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest(this);
      int from_bitField0_ = bitField0_;
@@ -474,7 +639,7 @@ public final class SecureBulkLoadProtos {
      onBuilt();
      return result;
    }
-
+
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest) {
        return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest)other);
@@ -483,7 +648,7 @@ public final class SecureBulkLoadProtos {
        return this;
      }
    }
-
+
    public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest other) {
      if (other == org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest.getDefaultInstance()) return this;
      if (familyPathBuilder_ == null) {
@@ -519,12 +684,14 @@ public final class SecureBulkLoadProtos {
        mergeFsToken(other.getFsToken());
      }
      if (other.hasBulkToken()) {
-       setBulkToken(other.getBulkToken());
+       bitField0_ |= 0x00000008;
+       bulkToken_ = other.bulkToken_;
+       onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      return this;
    }
-
+
    public final boolean isInitialized() {
      if (!hasFsToken()) {
@@ -542,61 +709,26 @@ public final class SecureBulkLoadProtos {
      }
      return true;
    }
-
+
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-     com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-       com.google.protobuf.UnknownFieldSet.newBuilder(
-         this.getUnknownFields());
-     while (true) {
-       int tag = input.readTag();
-       switch (tag) {
-         case 0:
-           this.setUnknownFields(unknownFields.build());
-           onChanged();
-           return this;
-         default: {
-           if (!parseUnknownField(input, unknownFields,
-                                  extensionRegistry, tag)) {
-             this.setUnknownFields(unknownFields.build());
-             onChanged();
-             return this;
-           }
-           break;
-         }
-         case 10: {
-           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.newBuilder();
-           input.readMessage(subBuilder, extensionRegistry);
-           addFamilyPath(subBuilder.buildPartial());
-           break;
-         }
-         case 16: {
-           bitField0_ |= 0x00000002;
-           assignSeqNum_ = input.readBool();
-           break;
-         }
-         case 26: {
-           org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.newBuilder();
-           if (hasFsToken()) {
-             subBuilder.mergeFrom(getFsToken());
-           }
-           input.readMessage(subBuilder, extensionRegistry);
-           setFsToken(subBuilder.buildPartial());
-           break;
-         }
-         case 34: {
-           bitField0_ |= 0x00000008;
-           bulkToken_ = input.readBytes();
-           break;
-         }
+     org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parsedMessage = null;
+     try {
+       parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+     } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+       parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest) e.getUnfinishedMessage();
+       throw e;
+     } finally {
+       if (parsedMessage != null) {
+         mergeFrom(parsedMessage);
        }
      }
+     return this;
    }
-
    private int bitField0_;
-
+
    // repeated .BulkLoadHFileRequest.FamilyPath familyPath = 1;
    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath> familyPath_ =
      java.util.Collections.emptyList();
@@ -606,10 +738,13 @@ public final class SecureBulkLoadProtos {
        bitField0_ |= 0x00000001;
      }
    }
-
+
    private com.google.protobuf.RepeatedFieldBuilder<
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder> familyPathBuilder_;
-
+
+    /**
+     * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 1;
+     */
    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath> getFamilyPathList() {
      if (familyPathBuilder_ == null) {
        return java.util.Collections.unmodifiableList(familyPath_);
@@ -617,6 +752,9 @@ public final class SecureBulkLoadProtos {
        return familyPathBuilder_.getMessageList();
      }
    }
+    /**
+     * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 1;
+     */
    public int getFamilyPathCount() {
      if (familyPathBuilder_ == null) {
        return familyPath_.size();
@@ -624,6 +762,9 @@ public final class SecureBulkLoadProtos {
        return familyPathBuilder_.getCount();
      }
    }
+    /**
+     * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 1;
+     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index) {
      if (familyPathBuilder_ == null) {
        return familyPath_.get(index);
@@ -631,6 +772,9 @@ public final class SecureBulkLoadProtos {
        return familyPathBuilder_.getMessage(index);
      }
    }
+    /**
+     * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 1;
+     */
    public Builder setFamilyPath(
        int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath value) {
      if (familyPathBuilder_ == null) {
@@ -645,6 +789,9 @@ public final class SecureBulkLoadProtos {
      }
      return this;
    }
+    /**
+     * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 1;
+     */
    public Builder setFamilyPath(
        int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) {
      if (familyPathBuilder_ == null) {
@@ -656,6 +803,9 @@ public final class SecureBulkLoadProtos {
      }
      return this;
    }
+    /**
+     * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 1;
+     */
    public Builder addFamilyPath(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath value) {
      if (familyPathBuilder_ == null) {
        if (value == null) {
@@ -669,6 +819,9 @@ public final class SecureBulkLoadProtos {
      }
      return this;
    }
+    /**
+     * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 1;
+     */
    public Builder addFamilyPath(
        int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath value) {
      if (familyPathBuilder_ == null) {
@@ -683,6 +836,9 @@ public final class SecureBulkLoadProtos {
      }
      return this;
    }
+    /**
+     * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 1;
+     */
    public Builder addFamilyPath(
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) {
      if (familyPathBuilder_ == null) {
@@ -694,6 +850,9 @@ public final class SecureBulkLoadProtos {
      }
      return this;
    }
+    /**
+     * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 1;
+     */
    public Builder addFamilyPath(
        int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) {
      if (familyPathBuilder_ == null) {
@@ -705,6 +864,9 @@ public final class SecureBulkLoadProtos {
      }
      return this;
    }
+    /**
+     * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 1;
+     */
    public Builder addAllFamilyPath(
        java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath> values) {
      if (familyPathBuilder_ == null) {
@@ -716,6 +878,9 @@ public final class SecureBulkLoadProtos {
      }
      return this;
    }
+    /**
+     * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 1;
+     */
    public Builder clearFamilyPath() {
      if (familyPathBuilder_ == null) {
        familyPath_ = java.util.Collections.emptyList();
@@ -726,6 +891,9 @@ public final class SecureBulkLoadProtos {
      }
      return this;
    }
+    /**
+     * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 1;
+     */
    public Builder removeFamilyPath(int index) {
      if (familyPathBuilder_ == null) {
        ensureFamilyPathIsMutable();
@@ -736,10 +904,16 @@ public final class SecureBulkLoadProtos {
      }
      return this;
    }
+    /**
+     * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 1;
+     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder getFamilyPathBuilder(
        int index) {
      return getFamilyPathFieldBuilder().getBuilder(index);
    }
+    /**
+     * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 1;
+     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder(
        int index) {
      if (familyPathBuilder_ == null) {
@@ -747,6 +921,9 @@ public final class SecureBulkLoadProtos {
        return familyPathBuilder_.getMessageOrBuilder(index);
      }
    }
+    /**
+     * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 1;
+     */
    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder>
         getFamilyPathOrBuilderList() {
      if (familyPathBuilder_ != null) {
@@ -755,15 +932,24 @@ public final class SecureBulkLoadProtos {
        return java.util.Collections.unmodifiableList(familyPath_);
      }
    }
+    /**
+     * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 1;
+     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder addFamilyPathBuilder() {
      return getFamilyPathFieldBuilder().addBuilder(
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance());
    }
+    /**
+     * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 1;
+     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder addFamilyPathBuilder(
        int index) {
      return getFamilyPathFieldBuilder().addBuilder(
          index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance());
    }
+    /**
+     * repeated .BulkLoadHFileRequest.FamilyPath familyPath = 1;
+     */
    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder>
         getFamilyPathBuilderList() {
      return getFamilyPathFieldBuilder().getBuilderList();
@@ -782,35 +968,53 @@ public final class SecureBulkLoadProtos {
      }
      return familyPathBuilder_;
    }
-
+
    // optional bool assignSeqNum = 2;
    private boolean assignSeqNum_ ;
+    /**
+     * optional bool assignSeqNum = 2;
+     */
    public boolean hasAssignSeqNum() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
+    /**
+     * optional bool assignSeqNum = 2;
+     */
    public boolean getAssignSeqNum() {
      return assignSeqNum_;
    }
+    /**
+     * optional bool assignSeqNum = 2;
+     */
    public Builder setAssignSeqNum(boolean value) {
      bitField0_ |= 0x00000002;
      assignSeqNum_ = value;
      onChanged();
      return this;
    }
+    /**
+     * optional bool assignSeqNum = 2;
+     */
    public Builder clearAssignSeqNum() {
      bitField0_ = (bitField0_ & ~0x00000002);
      assignSeqNum_ = false;
      onChanged();
      return this;
    }
-
+
    // required .DelegationTokenProto fsToken = 3;
    private org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto fsToken_ = org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.getDefaultInstance();
    private com.google.protobuf.SingleFieldBuilder<
        org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.Builder, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProtoOrBuilder> fsTokenBuilder_;
+    /**
+     * required .DelegationTokenProto fsToken = 3;
+     */
    public boolean hasFsToken() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
+    /**
+     * required .DelegationTokenProto fsToken = 3;
+     */
    public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto getFsToken() {
      if (fsTokenBuilder_ == null) {
        return fsToken_;
@@ -818,6 +1022,9 @@ public final class SecureBulkLoadProtos {
        return fsTokenBuilder_.getMessage();
      }
    }
+    /**
+     * required .DelegationTokenProto fsToken = 3;
+     */
    public Builder setFsToken(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto value) {
      if (fsTokenBuilder_ == null) {
        if (value == null) {
@@ -831,6 +1038,9 @@ public final class SecureBulkLoadProtos {
      bitField0_ |= 0x00000004;
      return this;
    }
+    /**
+     * required .DelegationTokenProto fsToken = 3;
+     */
    public Builder setFsToken(
        org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.Builder builderForValue) {
      if (fsTokenBuilder_ == null) {
@@ -842,6 +1052,9 @@ public final class SecureBulkLoadProtos {
      bitField0_ |= 0x00000004;
      return this;
    }
+    /**
+     * required .DelegationTokenProto fsToken = 3;
+     */
    public Builder mergeFsToken(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto value) {
      if (fsTokenBuilder_ == null) {
        if (((bitField0_ & 0x00000004) == 0x00000004) &&
@@ -858,6 +1071,9 @@ public final class SecureBulkLoadProtos {
      bitField0_ |= 0x00000004;
      return this;
    }
+    /**
+     * required .DelegationTokenProto fsToken = 3;
+     */
    public Builder clearFsToken() {
      if (fsTokenBuilder_ == null) {
        fsToken_ = org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.getDefaultInstance();
@@ -868,11 +1084,17 @@ public final class SecureBulkLoadProtos {
      bitField0_ = (bitField0_ & ~0x00000004);
      return this;
    }
+    /**
+     * required .DelegationTokenProto fsToken = 3;
+     */
    public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.Builder getFsTokenBuilder() {
      bitField0_ |= 0x00000004;
      onChanged();
      return getFsTokenFieldBuilder().getBuilder();
    }
+    /**
+     * required .DelegationTokenProto fsToken = 3;
+     */
    public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProtoOrBuilder getFsTokenOrBuilder() {
      if (fsTokenBuilder_ != null) {
        return fsTokenBuilder_.getMessageOrBuilder();
@@ -880,6 +1102,9 @@ public final class SecureBulkLoadProtos {
        return fsToken_;
      }
    }
+    /**
+     * required .DelegationTokenProto fsToken = 3;
+     */
    private com.google.protobuf.SingleFieldBuilder<
        org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.Builder, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProtoOrBuilder>
        getFsTokenFieldBuilder() {
@@ -893,23 +1118,50 @@ public final class SecureBulkLoadProtos {
      }
      return fsTokenBuilder_;
    }
-
+
    // required string bulkToken = 4;
    private java.lang.Object bulkToken_ = "";
+    /**
+     * required string bulkToken = 4;
+     */
    public boolean hasBulkToken() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
-    public String getBulkToken() {
+    /**
+     * required string bulkToken = 4;
+     */
+    public java.lang.String getBulkToken() {
      java.lang.Object ref = bulkToken_;
-      if (!(ref instanceof String)) {
-        String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
+      if (!(ref instanceof java.lang.String)) {
+        java.lang.String s = ((com.google.protobuf.ByteString) ref)
+            .toStringUtf8();
        bulkToken_ = s;
        return s;
      } else {
-        return (String) ref;
+        return (java.lang.String) ref;
+      }
+    }
+    /**
+     * required string bulkToken = 4;
+     */
+    public com.google.protobuf.ByteString
+        getBulkTokenBytes() {
+      java.lang.Object ref = bulkToken_;
+      if (ref instanceof String) {
+        com.google.protobuf.ByteString b =
+            com.google.protobuf.ByteString.copyFromUtf8(
+                (java.lang.String) ref);
+        bulkToken_ = b;
+        return b;
+      } else {
+        return (com.google.protobuf.ByteString) ref;
      }
    }
-    public Builder setBulkToken(String value) {
+    /**
+     * required string bulkToken = 4;
+     */
+    public Builder setBulkToken(
+        java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
@@ -918,75 +1170,165 @@ public final class SecureBulkLoadProtos {
      onChanged();
      return this;
    }
+    /**
+     * required string bulkToken = 4;
+     */
    public Builder clearBulkToken() {
      bitField0_ = (bitField0_ & ~0x00000008);
      bulkToken_ = getDefaultInstance().getBulkToken();
      onChanged();
      return this;
    }
-    void setBulkToken(com.google.protobuf.ByteString value) {
-      bitField0_ |= 0x00000008;
+    /**
+     * required string bulkToken = 4;
+     */
+    public Builder setBulkTokenBytes(
+        com.google.protobuf.ByteString value) {
+      if (value == null) {
+        throw new NullPointerException();
+      }
+      bitField0_ |= 0x00000008;
      bulkToken_ = value;
      onChanged();
+      return this;
    }
-
+
    // @@protoc_insertion_point(builder_scope:SecureBulkLoadHFilesRequest)
  }
-
+
  static {
    defaultInstance = new SecureBulkLoadHFilesRequest(true);
    defaultInstance.initFields();
  }
-
+
  // @@protoc_insertion_point(class_scope:SecureBulkLoadHFilesRequest)
}
-
+
public interface SecureBulkLoadHFilesResponseOrBuilder
    extends com.google.protobuf.MessageOrBuilder {
-
+
  // required bool loaded = 1;
+  /**
+   * required bool loaded = 1;
+   */
  boolean hasLoaded();
+  /**
+   * required bool loaded = 1;
+   */
  boolean getLoaded();
}
+ /**
+  * Protobuf type {@code SecureBulkLoadHFilesResponse}
+  */
public static final class SecureBulkLoadHFilesResponse extends
    com.google.protobuf.GeneratedMessage
    implements SecureBulkLoadHFilesResponseOrBuilder {
  // Use SecureBulkLoadHFilesResponse.newBuilder() to construct.
-  private SecureBulkLoadHFilesResponse(Builder builder) {
+  private SecureBulkLoadHFilesResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
    super(builder);
+    this.unknownFields = builder.getUnknownFields();
  }
-  private SecureBulkLoadHFilesResponse(boolean noInit) {}
-
+  private SecureBulkLoadHFilesResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
  private static final SecureBulkLoadHFilesResponse defaultInstance;
  public static SecureBulkLoadHFilesResponse getDefaultInstance() {
    return defaultInstance;
  }
-
+
  public SecureBulkLoadHFilesResponse getDefaultInstanceForType() {
    return defaultInstance;
  }
-
+
+  private final com.google.protobuf.UnknownFieldSet unknownFields;
+  @java.lang.Override
+  public final com.google.protobuf.UnknownFieldSet
+      getUnknownFields() {
+    return this.unknownFields;
+  }
+  private SecureBulkLoadHFilesResponse(
+      com.google.protobuf.CodedInputStream input,
+      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+      throws com.google.protobuf.InvalidProtocolBufferException {
+    initFields();
+    int mutable_bitField0_ = 0;
+    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+        com.google.protobuf.UnknownFieldSet.newBuilder();
+    try {
+      boolean done = false;
+      while (!done) {
+        int tag = input.readTag();
+        switch (tag) {
+          case 0:
+            done = true;
+            break;
+          default: {
+            if (!parseUnknownField(input, unknownFields,
+                                   extensionRegistry, tag)) {
+              done = true;
+            }
+            break;
+          }
+          case 8: {
+            bitField0_ |= 0x00000001;
+            loaded_ = input.readBool();
+            break;
+          }
+        }
+      }
+    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+      throw e.setUnfinishedMessage(this);
+    } catch (java.io.IOException e) {
+      throw new com.google.protobuf.InvalidProtocolBufferException(
+          e.getMessage()).setUnfinishedMessage(this);
+    } finally {
+      this.unknownFields = unknownFields.build();
+      makeExtensionsImmutable();
+    }
+  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_SecureBulkLoadHFilesResponse_descriptor;
  }
-
+
  protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internalGetFieldAccessorTable() {
-    return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_SecureBulkLoadHFilesResponse_fieldAccessorTable;
+    return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_SecureBulkLoadHFilesResponse_fieldAccessorTable
+        .ensureFieldAccessorsInitialized(
+            org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.class, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.Builder.class);
  }
+
+  public static com.google.protobuf.Parser<SecureBulkLoadHFilesResponse> PARSER =
+      new com.google.protobuf.AbstractParser<SecureBulkLoadHFilesResponse>() {
+    public SecureBulkLoadHFilesResponse parsePartialFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite
extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new SecureBulkLoadHFilesResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required bool loaded = 1; public static final int LOADED_FIELD_NUMBER = 1; private boolean loaded_; + /** + * required bool loaded = 1; + */ public boolean hasLoaded() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bool loaded = 1; + */ public boolean getLoaded() { return loaded_; } - + private void initFields() { loaded_ = false; } @@ -994,7 +1336,7 @@ public final class SecureBulkLoadProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasLoaded()) { memoizedIsInitialized = 0; return false; @@ -1002,7 +1344,7 @@ public final class SecureBulkLoadProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -1011,12 +1353,12 @@ public final class SecureBulkLoadProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -1026,14 +1368,14 @@ public final class SecureBulkLoadProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -1043,7 +1385,7 @@ public final class SecureBulkLoadProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse other = (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse) obj; - + boolean result = true; result = result && (hasLoaded() == other.hasLoaded()); if (hasLoaded()) { @@ -1054,9 +1396,13 @@ public final class SecureBulkLoadProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasLoaded()) { @@ -1064,89 +1410,79 @@ public final class SecureBulkLoadProtos { hash = (53 * hash) + hashBoolean(getLoaded()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, 
extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code SecureBulkLoadHFilesResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements 
org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponseOrBuilder { @@ -1154,18 +1490,21 @@ public final class SecureBulkLoadProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_SecureBulkLoadHFilesResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_SecureBulkLoadHFilesResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_SecureBulkLoadHFilesResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.class, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -1176,27 +1515,27 @@ public final class SecureBulkLoadProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); loaded_ = false; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_SecureBulkLoadHFilesResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse build() { org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse result = buildPartial(); if (!result.isInitialized()) { @@ -1204,17 +1543,7 @@ public final class SecureBulkLoadProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse result = new org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse(this); int from_bitField0_ = bitField0_; @@ -1227,7 +1556,7 @@ public final class SecureBulkLoadProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof 
org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse)other); @@ -1236,7 +1565,7 @@ public final class SecureBulkLoadProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.getDefaultInstance()) return this; if (other.hasLoaded()) { @@ -1245,7 +1574,7 @@ public final class SecureBulkLoadProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasLoaded()) { @@ -1253,205 +1582,352 @@ public final class SecureBulkLoadProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - loaded_ = input.readBool(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required bool loaded = 1; private boolean loaded_ ; + /** + * required bool loaded = 1; + */ public boolean hasLoaded() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bool loaded = 1; + */ public boolean getLoaded() { return loaded_; } + /** + * required bool loaded = 1; + */ public Builder setLoaded(boolean value) { bitField0_ |= 0x00000001; loaded_ = value; onChanged(); return this; } + /** + * required bool loaded = 1; + */ public Builder clearLoaded() { bitField0_ = (bitField0_ & ~0x00000001); loaded_ = false; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:SecureBulkLoadHFilesResponse) } - + static { defaultInstance = new SecureBulkLoadHFilesResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:SecureBulkLoadHFilesResponse) } - + public interface DelegationTokenProtoOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // optional bytes identifier = 1; + /** + * optional bytes identifier = 1; + */ boolean hasIdentifier(); + /** + * optional bytes identifier = 1; + */ com.google.protobuf.ByteString getIdentifier(); - + // optional bytes password = 2; + /** + * optional bytes password = 2; + */ boolean hasPassword(); + /** + * optional bytes password = 2; + */ com.google.protobuf.ByteString getPassword(); - + // optional string kind = 3; + /** + * optional string kind = 3; + */ boolean hasKind(); - 
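The Builder.mergeFrom rewrite above delegates to PARSER.parsePartialFrom and, on failure, merges InvalidProtocolBufferException.getUnfinishedMessage() into the builder before rethrowing, so partially read fields are not lost. Below is a minimal sketch of that protobuf 2.5 error contract from the caller's side; the two-byte wire value is hand-encoded here (field 1, varint, value true), and everything else uses only APIs visible in this diff.

    import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos;

    public class ParseContractSketch {
      public static void main(String[] args) {
        byte[] wire = {0x08, 0x01};   // tag (1 << 3 | 0) = 0x08, varint 1: loaded=true
        try {
          SecureBulkLoadProtos.SecureBulkLoadHFilesResponse resp =
              SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.PARSER.parseFrom(wire);
          System.out.println("loaded=" + resp.getLoaded());
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Since protobuf 2.5, a failed parse can still carry the fields that
          // were decoded before the error; mergeFrom() above merges this
          // partial message into the builder before rethrowing.
          com.google.protobuf.MessageLite partial = e.getUnfinishedMessage();
          System.err.println("parse failed; partial message: " + partial);
        }
      }
    }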
String getKind(); - + /** + * optional string kind = 3; + */ + java.lang.String getKind(); + /** + * optional string kind = 3; + */ + com.google.protobuf.ByteString + getKindBytes(); + // optional string service = 4; + /** + * optional string service = 4; + */ boolean hasService(); - String getService(); + /** + * optional string service = 4; + */ + java.lang.String getService(); + /** + * optional string service = 4; + */ + com.google.protobuf.ByteString + getServiceBytes(); } + /** + * Protobuf type {@code DelegationTokenProto} + */ public static final class DelegationTokenProto extends com.google.protobuf.GeneratedMessage implements DelegationTokenProtoOrBuilder { // Use DelegationTokenProto.newBuilder() to construct. - private DelegationTokenProto(Builder builder) { + private DelegationTokenProto(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private DelegationTokenProto(boolean noInit) {} - + private DelegationTokenProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final DelegationTokenProto defaultInstance; public static DelegationTokenProto getDefaultInstance() { return defaultInstance; } - + public DelegationTokenProto getDefaultInstanceForType() { return defaultInstance; } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_DelegationTokenProto_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_DelegationTokenProto_fieldAccessorTable; + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; } - - private int bitField0_; + private DelegationTokenProto( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + identifier_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + password_ = input.readBytes(); + break; + } + case 26: { + bitField0_ |= 0x00000004; + kind_ = input.readBytes(); + break; + } + case 34: { + bitField0_ |= 0x00000008; + service_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_DelegationTokenProto_descriptor; + } + + 
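With the parsing constructor added above, decoding now happens at construction time: the (CodedInputStream, ExtensionRegistryLite) constructor consumes tags 10/18/26/34 (fields 1 through 4, wire type 2, length-delimited) and banks anything unrecognized in unknownFields instead of dropping it. A small round-trip sketch under those semantics follows; the identifier and kind values are examples, not taken from the patch.

    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos;

    public class RoundTripSketch {
      public static void main(String[] args) throws Exception {
        SecureBulkLoadProtos.DelegationTokenProto token =
            SecureBulkLoadProtos.DelegationTokenProto.newBuilder()
                .setIdentifier(ByteString.copyFromUtf8("id"))   // example value
                .setKind("HDFS_DELEGATION_TOKEN")               // example value
                .build();

        // toByteArray() emits tag = (fieldNumber << 3) | 2 for each set field,
        // matching the case labels 10, 18, 26 and 34 in the constructor above.
        byte[] wire = token.toByteArray();
        SecureBulkLoadProtos.DelegationTokenProto parsed =
            SecureBulkLoadProtos.DelegationTokenProto.PARSER.parseFrom(wire);

        assert parsed.getKind().equals(token.getKind());
        // A field this schema does not know would survive in
        // parsed.getUnknownFields() rather than being silently discarded.
      }
    }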
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_DelegationTokenProto_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.class, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public DelegationTokenProto parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new DelegationTokenProto(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; // optional bytes identifier = 1; public static final int IDENTIFIER_FIELD_NUMBER = 1; private com.google.protobuf.ByteString identifier_; + /** + * optional bytes identifier = 1; + */ public boolean hasIdentifier() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional bytes identifier = 1; + */ public com.google.protobuf.ByteString getIdentifier() { return identifier_; } - + // optional bytes password = 2; public static final int PASSWORD_FIELD_NUMBER = 2; private com.google.protobuf.ByteString password_; + /** + * optional bytes password = 2; + */ public boolean hasPassword() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional bytes password = 2; + */ public com.google.protobuf.ByteString getPassword() { return password_; } - + // optional string kind = 3; public static final int KIND_FIELD_NUMBER = 3; private java.lang.Object kind_; + /** + * optional string kind = 3; + */ public boolean hasKind() { return ((bitField0_ & 0x00000004) == 0x00000004); } - public String getKind() { + /** + * optional string kind = 3; + */ + public java.lang.String getKind() { java.lang.Object ref = kind_; - if (ref instanceof String) { - return (String) ref; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { kind_ = s; } return s; } } - private com.google.protobuf.ByteString getKindBytes() { + /** + * optional string kind = 3; + */ + public com.google.protobuf.ByteString + getKindBytes() { java.lang.Object ref = kind_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); kind_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + // optional string service = 4; public static final int SERVICE_FIELD_NUMBER = 4; private java.lang.Object service_; + /** + * optional string service = 4; + */ public boolean hasService() { return ((bitField0_ & 0x00000008) == 0x00000008); } - public String getService() { + /** + * optional string service = 4; + */ + public java.lang.String getService() { java.lang.Object ref = service_; - if (ref instanceof String) { - return (String) ref; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; 
} else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { service_ = s; } return s; } } - private com.google.protobuf.ByteString getServiceBytes() { + /** + * optional string service = 4; + */ + public com.google.protobuf.ByteString + getServiceBytes() { java.lang.Object ref = service_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); service_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + private void initFields() { identifier_ = com.google.protobuf.ByteString.EMPTY; password_ = com.google.protobuf.ByteString.EMPTY; @@ -1462,11 +1938,11 @@ public final class SecureBulkLoadProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -1484,12 +1960,12 @@ public final class SecureBulkLoadProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -1511,14 +1987,14 @@ public final class SecureBulkLoadProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -1528,7 +2004,7 @@ public final class SecureBulkLoadProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto other = (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto) obj; - + boolean result = true; result = result && (hasIdentifier() == other.hasIdentifier()); if (hasIdentifier()) { @@ -1554,9 +2030,13 @@ public final class SecureBulkLoadProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasIdentifier()) { @@ -1576,89 +2056,79 @@ public final class SecureBulkLoadProtos { hash = (53 * hash) + getService().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code DelegationTokenProto} + */ public static final class Builder extends 
com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProtoOrBuilder { @@ -1666,18 +2136,21 @@ public final class SecureBulkLoadProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_DelegationTokenProto_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_DelegationTokenProto_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_DelegationTokenProto_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.class, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -1688,7 +2161,7 @@ public final class SecureBulkLoadProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); identifier_ = com.google.protobuf.ByteString.EMPTY; @@ -1701,20 +2174,20 @@ public final class SecureBulkLoadProtos { bitField0_ = (bitField0_ & ~0x00000008); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_DelegationTokenProto_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto build() { org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto result = buildPartial(); if (!result.isInitialized()) { @@ -1722,17 +2195,7 @@ public final class SecureBulkLoadProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto buildPartial() { org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto result = new org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto(this); int from_bitField0_ = bitField0_; @@ -1757,7 +2220,7 @@ public final class SecureBulkLoadProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other 
instanceof org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto)other); @@ -1766,7 +2229,7 @@ public final class SecureBulkLoadProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto other) { if (other == org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.getDefaultInstance()) return this; if (other.hasIdentifier()) { @@ -1776,76 +2239,59 @@ public final class SecureBulkLoadProtos { setPassword(other.getPassword()); } if (other.hasKind()) { - setKind(other.getKind()); + bitField0_ |= 0x00000004; + kind_ = other.kind_; + onChanged(); } if (other.hasService()) { - setService(other.getService()); + bitField0_ |= 0x00000008; + service_ = other.service_; + onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - identifier_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - password_ = input.readBytes(); - break; - } - case 26: { - bitField0_ |= 0x00000004; - kind_ = input.readBytes(); - break; - } - case 34: { - bitField0_ |= 0x00000008; - service_ = input.readBytes(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // optional bytes identifier = 1; private com.google.protobuf.ByteString identifier_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes identifier = 1; + */ public boolean hasIdentifier() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional bytes identifier = 1; + */ public com.google.protobuf.ByteString getIdentifier() { return identifier_; } + /** + * optional bytes identifier = 1; + */ public Builder setIdentifier(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -1855,21 +2301,33 @@ public final class SecureBulkLoadProtos { onChanged(); return this; } + /** + * optional bytes identifier = 1; + */ public Builder clearIdentifier() { bitField0_ = (bitField0_ & ~0x00000001); identifier_ = getDefaultInstance().getIdentifier(); onChanged(); return this; } - + // optional bytes password = 2; private com.google.protobuf.ByteString password_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes 
password = 2; + */ public boolean hasPassword() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional bytes password = 2; + */ public com.google.protobuf.ByteString getPassword() { return password_; } + /** + * optional bytes password = 2; + */ public Builder setPassword(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -1879,29 +2337,59 @@ public final class SecureBulkLoadProtos { onChanged(); return this; } + /** + * optional bytes password = 2; + */ public Builder clearPassword() { bitField0_ = (bitField0_ & ~0x00000002); password_ = getDefaultInstance().getPassword(); onChanged(); return this; } - + // optional string kind = 3; private java.lang.Object kind_ = ""; + /** + * optional string kind = 3; + */ public boolean hasKind() { return ((bitField0_ & 0x00000004) == 0x00000004); } - public String getKind() { + /** + * optional string kind = 3; + */ + public java.lang.String getKind() { java.lang.Object ref = kind_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); kind_ = s; return s; } else { - return (String) ref; + return (java.lang.String) ref; } } - public Builder setKind(String value) { + /** + * optional string kind = 3; + */ + public com.google.protobuf.ByteString + getKindBytes() { + java.lang.Object ref = kind_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + kind_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string kind = 3; + */ + public Builder setKind( + java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ -1910,34 +2398,72 @@ public final class SecureBulkLoadProtos { onChanged(); return this; } + /** + * optional string kind = 3; + */ public Builder clearKind() { bitField0_ = (bitField0_ & ~0x00000004); kind_ = getDefaultInstance().getKind(); onChanged(); return this; } - void setKind(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000004; + /** + * optional string kind = 3; + */ + public Builder setKindBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; kind_ = value; onChanged(); + return this; } - + // optional string service = 4; private java.lang.Object service_ = ""; + /** + * optional string service = 4; + */ public boolean hasService() { return ((bitField0_ & 0x00000008) == 0x00000008); } - public String getService() { + /** + * optional string service = 4; + */ + public java.lang.String getService() { java.lang.Object ref = service_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); service_ = s; return s; } else { - return (String) ref; + return (java.lang.String) ref; } } - public Builder setService(String value) { + /** + * optional string service = 4; + */ + public com.google.protobuf.ByteString + getServiceBytes() { + java.lang.Object ref = service_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + service_ = b; + return b; + } else { + return 
(com.google.protobuf.ByteString) ref; + } + } + /** + * optional string service = 4; + */ + public Builder setService( + java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ -1946,75 +2472,165 @@ public final class SecureBulkLoadProtos { onChanged(); return this; } + /** + * optional string service = 4; + */ public Builder clearService() { bitField0_ = (bitField0_ & ~0x00000008); service_ = getDefaultInstance().getService(); onChanged(); return this; } - void setService(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000008; + /** + * optional string service = 4; + */ + public Builder setServiceBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000008; service_ = value; onChanged(); + return this; } - + // @@protoc_insertion_point(builder_scope:DelegationTokenProto) } - + static { defaultInstance = new DelegationTokenProto(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:DelegationTokenProto) } - + public interface PrepareBulkLoadRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required bytes tableName = 1; + /** + * required bytes tableName = 1; + */ boolean hasTableName(); + /** + * required bytes tableName = 1; + */ com.google.protobuf.ByteString getTableName(); } + /** + * Protobuf type {@code PrepareBulkLoadRequest} + */ public static final class PrepareBulkLoadRequest extends com.google.protobuf.GeneratedMessage implements PrepareBulkLoadRequestOrBuilder { // Use PrepareBulkLoadRequest.newBuilder() to construct. - private PrepareBulkLoadRequest(Builder builder) { + private PrepareBulkLoadRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private PrepareBulkLoadRequest(boolean noInit) {} - + private PrepareBulkLoadRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final PrepareBulkLoadRequest defaultInstance; public static PrepareBulkLoadRequest getDefaultInstance() { return defaultInstance; } - + public PrepareBulkLoadRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private PrepareBulkLoadRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + tableName_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final 
com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_PrepareBulkLoadRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_PrepareBulkLoadRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_PrepareBulkLoadRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest.class, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public PrepareBulkLoadRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new PrepareBulkLoadRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required bytes tableName = 1; public static final int TABLENAME_FIELD_NUMBER = 1; private com.google.protobuf.ByteString tableName_; + /** + * required bytes tableName = 1; + */ public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes tableName = 1; + */ public com.google.protobuf.ByteString getTableName() { return tableName_; } - + private void initFields() { tableName_ = com.google.protobuf.ByteString.EMPTY; } @@ -2022,7 +2638,7 @@ public final class SecureBulkLoadProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasTableName()) { memoizedIsInitialized = 0; return false; @@ -2030,7 +2646,7 @@ public final class SecureBulkLoadProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -2039,12 +2655,12 @@ public final class SecureBulkLoadProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -2054,14 +2670,14 @@ public final class SecureBulkLoadProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -2071,7 +2687,7 @@ public final class SecureBulkLoadProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest other = (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest) obj; - + boolean result = true; result = result && (hasTableName() == other.hasTableName()); if (hasTableName()) { @@ -2082,9 +2698,13 @@ public final class SecureBulkLoadProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + 
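The generated equals() above is plain value equality over the set fields plus unknown fields, and it pairs with the memoized hashCode() added just below, so these messages behave as immutable value objects. A short sketch of what that guarantees to callers; it relies only on generated APIs shown in this diff plus toByteArray(), which every generated message provides.

    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos;

    public class ValueSemanticsSketch {
      public static void main(String[] args) throws Exception {
        SecureBulkLoadProtos.PrepareBulkLoadRequest a =
            SecureBulkLoadProtos.PrepareBulkLoadRequest.newBuilder()
                .setTableName(ByteString.copyFromUtf8("t1"))    // example table name
                .build();

        // Re-parsing a's wire form yields a distinct object that compares equal
        // field by field, so built and parsed messages can share hash-based
        // collections; the memoized hashCode makes repeated lookups cheap.
        SecureBulkLoadProtos.PrepareBulkLoadRequest b =
            SecureBulkLoadProtos.PrepareBulkLoadRequest.PARSER.parseFrom(a.toByteArray());

        assert a.equals(b) && a.hashCode() == b.hashCode();
      }
    }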
+ private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasTableName()) { @@ -2092,89 +2712,79 @@ public final class SecureBulkLoadProtos { hash = (53 * hash) + getTableName().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static 
org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code PrepareBulkLoadRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequestOrBuilder { @@ -2182,18 +2792,21 @@ public final class SecureBulkLoadProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_PrepareBulkLoadRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_PrepareBulkLoadRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_PrepareBulkLoadRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest.class, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -2204,27 +2817,27 @@ public final class SecureBulkLoadProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); tableName_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_PrepareBulkLoadRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest build() { org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest result = buildPartial(); if (!result.isInitialized()) { @@ -2232,17 +2845,7 @@ public final class 
SecureBulkLoadProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest result = new org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest(this); int from_bitField0_ = bitField0_; @@ -2255,7 +2858,7 @@ public final class SecureBulkLoadProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest)other); @@ -2264,7 +2867,7 @@ public final class SecureBulkLoadProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest.getDefaultInstance()) return this; if (other.hasTableName()) { @@ -2273,7 +2876,7 @@ public final class SecureBulkLoadProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasTableName()) { @@ -2281,49 +2884,43 @@ public final class SecureBulkLoadProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - tableName_ = input.readBytes(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required bytes tableName = 1; private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes tableName = 1; + */ public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes tableName = 1; + */ public com.google.protobuf.ByteString getTableName() { return tableName_; } + /** + * required bytes tableName = 1; + */ public Builder 
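The rewritten Builder.mergeFrom(CodedInputStream, ExtensionRegistryLite) above delegates to PARSER.parsePartialFrom and, on failure, recovers the partially decoded message from the exception and still merges it before rethrowing. A sketch of that contract as a hypothetical helper (same generic-type assumption as above):

    import com.google.protobuf.CodedInputStream;
    import com.google.protobuf.ExtensionRegistryLite;
    import com.google.protobuf.InvalidProtocolBufferException;
    import java.io.IOException;
    import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest;

    // Parse failures no longer discard partial input: whatever was decoded
    // before the error is merged into the builder, then the exception propagates.
    final class MergeFromSketch {
      static void merge(PrepareBulkLoadRequest.Builder builder, CodedInputStream in)
          throws IOException {
        PrepareBulkLoadRequest parsed = null;
        try {
          parsed = PrepareBulkLoadRequest.PARSER.parsePartialFrom(
              in, ExtensionRegistryLite.getEmptyRegistry());
        } catch (InvalidProtocolBufferException e) {
          parsed = (PrepareBulkLoadRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsed != null) {
            builder.mergeFrom(parsed);
          }
        }
      }
    }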
setTableName(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -2333,92 +2930,184 @@ public final class SecureBulkLoadProtos { onChanged(); return this; } + /** + * required bytes tableName = 1; + */ public Builder clearTableName() { bitField0_ = (bitField0_ & ~0x00000001); tableName_ = getDefaultInstance().getTableName(); onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:PrepareBulkLoadRequest) } - + static { defaultInstance = new PrepareBulkLoadRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:PrepareBulkLoadRequest) } - + public interface PrepareBulkLoadResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required string bulkToken = 1; + /** + * required string bulkToken = 1; + */ boolean hasBulkToken(); - String getBulkToken(); + /** + * required string bulkToken = 1; + */ + java.lang.String getBulkToken(); + /** + * required string bulkToken = 1; + */ + com.google.protobuf.ByteString + getBulkTokenBytes(); } + /** + * Protobuf type {@code PrepareBulkLoadResponse} + */ public static final class PrepareBulkLoadResponse extends com.google.protobuf.GeneratedMessage implements PrepareBulkLoadResponseOrBuilder { // Use PrepareBulkLoadResponse.newBuilder() to construct. - private PrepareBulkLoadResponse(Builder builder) { + private PrepareBulkLoadResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private PrepareBulkLoadResponse(boolean noInit) {} - + private PrepareBulkLoadResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final PrepareBulkLoadResponse defaultInstance; public static PrepareBulkLoadResponse getDefaultInstance() { return defaultInstance; } - + public PrepareBulkLoadResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private PrepareBulkLoadResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + bulkToken_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_PrepareBulkLoadResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return 
org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_PrepareBulkLoadResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_PrepareBulkLoadResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse.class, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse.Builder.class); } - + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public PrepareBulkLoadResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new PrepareBulkLoadResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + private int bitField0_; // required string bulkToken = 1; public static final int BULKTOKEN_FIELD_NUMBER = 1; private java.lang.Object bulkToken_; + /** + * required string bulkToken = 1; + */ public boolean hasBulkToken() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getBulkToken() { + /** + * required string bulkToken = 1; + */ + public java.lang.String getBulkToken() { java.lang.Object ref = bulkToken_; - if (ref instanceof String) { - return (String) ref; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { bulkToken_ = s; } return s; } } - private com.google.protobuf.ByteString getBulkTokenBytes() { + /** + * required string bulkToken = 1; + */ + public com.google.protobuf.ByteString + getBulkTokenBytes() { java.lang.Object ref = bulkToken_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); bulkToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + private void initFields() { bulkToken_ = ""; } @@ -2426,7 +3115,7 @@ public final class SecureBulkLoadProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasBulkToken()) { memoizedIsInitialized = 0; return false; @@ -2434,7 +3123,7 @@ public final class SecureBulkLoadProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -2443,12 +3132,12 @@ public final class SecureBulkLoadProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -2458,14 +3147,14 @@ public final class SecureBulkLoadProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return 
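Two things worth noting in the bulkToken accessors above: getBulkToken now calls bs.isValidUtf8() directly instead of com.google.protobuf.Internal.isValidUtf8(bs), and getBulkTokenBytes is promoted from private to public to match the regenerated OrBuilder interface. The underlying scheme is the standard lazy String/ByteString dual representation, sketched here with a standalone holder:

    import com.google.protobuf.ByteString;

    // The field holds either a String or a ByteString; each getter converts
    // on first use and caches the result only when safe (valid UTF-8).
    final class LazyStringField {
      private Object ref; // java.lang.String or ByteString

      LazyStringField(ByteString wire) { this.ref = wire; }

      String asString() {
        if (ref instanceof String) {
          return (String) ref;
        }
        ByteString bs = (ByteString) ref;
        String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          ref = s; // cache the decoded form; invalid UTF-8 is re-decoded each call
        }
        return s;
      }

      ByteString asBytes() {
        if (ref instanceof String) {
          ByteString b = ByteString.copyFromUtf8((String) ref);
          ref = b;
          return b;
        }
        return (ByteString) ref;
      }
    }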
super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -2475,7 +3164,7 @@ public final class SecureBulkLoadProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse other = (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse) obj; - + boolean result = true; result = result && (hasBulkToken() == other.hasBulkToken()); if (hasBulkToken()) { @@ -2486,9 +3175,13 @@ public final class SecureBulkLoadProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasBulkToken()) { @@ -2496,89 +3189,79 @@ public final class SecureBulkLoadProtos { hash = (53 * hash) + getBulkToken().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse parseDelimitedFrom( 
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code PrepareBulkLoadResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponseOrBuilder { @@ -2586,18 +3269,21 @@ public final class SecureBulkLoadProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_PrepareBulkLoadResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_PrepareBulkLoadResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_PrepareBulkLoadResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse.class, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -2608,27 +3294,27 @@ public final class SecureBulkLoadProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); bulkToken_ = ""; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse.getDescriptor(); + return 
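On parseDelimitedFrom: if I read protobuf 2.5's AbstractParser correctly, it returns null at clean end-of-stream, which preserves the contract of the removed mergeDelimitedFrom(...) == false branch, so existing read loops keep their termination condition. Usage sketch:

    import java.io.IOException;
    import java.io.InputStream;
    import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse;

    // Read a stream of length-delimited messages until clean EOF (null).
    public final class DelimitedReadLoop {
      static void readAll(InputStream in) throws IOException {
        PrepareBulkLoadResponse msg;
        while ((msg = PrepareBulkLoadResponse.parseDelimitedFrom(in)) != null) {
          System.out.println("bulkToken=" + msg.getBulkToken());
        }
      }
    }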
org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_PrepareBulkLoadResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse build() { org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse result = buildPartial(); if (!result.isInitialized()) { @@ -2636,17 +3322,7 @@ public final class SecureBulkLoadProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse result = new org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse(this); int from_bitField0_ = bitField0_; @@ -2659,7 +3335,7 @@ public final class SecureBulkLoadProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse)other); @@ -2668,74 +3344,88 @@ public final class SecureBulkLoadProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse.getDefaultInstance()) return this; if (other.hasBulkToken()) { - setBulkToken(other.getBulkToken()); + bitField0_ |= 0x00000001; + bulkToken_ = other.bulkToken_; + onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasBulkToken()) { return false; } return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - bulkToken_ = input.readBytes(); - break; - } + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse parsedMessage = null; + try { + 
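Also in the hunk above: mergeFrom(other) for the string field no longer calls setBulkToken(other.getBulkToken()); it copies the internal field object (String or ByteString) directly, so merging never forces a UTF-8 decode of the source bytes. Illustrative sketch with a hypothetical holder type:

    import com.google.protobuf.ByteString;

    // Reference copy instead of getter/setter round trip; the field stays in
    // whatever representation the source message held.
    final class RawCopyMerge {
      Object bulkToken = ""; // String or ByteString, as in the generated code

      void mergeFrom(RawCopyMerge other) {
        this.bulkToken = other.bulkToken; // no conversion, no decode
      }

      static void demo() {
        RawCopyMerge src = new RawCopyMerge();
        src.bulkToken = ByteString.copyFromUtf8("token-1");
        RawCopyMerge dst = new RawCopyMerge();
        dst.mergeFrom(src); // dst now holds the same ByteString, still undecoded
      }
    }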
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required string bulkToken = 1; private java.lang.Object bulkToken_ = ""; + /** + * required string bulkToken = 1; + */ public boolean hasBulkToken() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getBulkToken() { + /** + * required string bulkToken = 1; + */ + public java.lang.String getBulkToken() { java.lang.Object ref = bulkToken_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); bulkToken_ = s; return s; } else { - return (String) ref; + return (java.lang.String) ref; } } - public Builder setBulkToken(String value) { + /** + * required string bulkToken = 1; + */ + public com.google.protobuf.ByteString + getBulkTokenBytes() { + java.lang.Object ref = bulkToken_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + bulkToken_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * required string bulkToken = 1; + */ + public Builder setBulkToken( + java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ -2744,97 +3434,197 @@ public final class SecureBulkLoadProtos { onChanged(); return this; } + /** + * required string bulkToken = 1; + */ public Builder clearBulkToken() { bitField0_ = (bitField0_ & ~0x00000001); bulkToken_ = getDefaultInstance().getBulkToken(); onChanged(); return this; } - void setBulkToken(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; + /** + * required string bulkToken = 1; + */ + public Builder setBulkTokenBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; bulkToken_ = value; onChanged(); + return this; } - + // @@protoc_insertion_point(builder_scope:PrepareBulkLoadResponse) } - + static { defaultInstance = new PrepareBulkLoadResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:PrepareBulkLoadResponse) } - + public interface CleanupBulkLoadRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required string bulkToken = 1; + /** + * required string bulkToken = 1; + */ boolean hasBulkToken(); - String getBulkToken(); + /** + * required string bulkToken = 1; + */ + java.lang.String getBulkToken(); + /** + * required string bulkToken = 1; + */ + com.google.protobuf.ByteString + getBulkTokenBytes(); } + /** + * Protobuf type {@code CleanupBulkLoadRequest} + */ public static final class CleanupBulkLoadRequest extends com.google.protobuf.GeneratedMessage implements CleanupBulkLoadRequestOrBuilder { // Use CleanupBulkLoadRequest.newBuilder() to construct. 
- private CleanupBulkLoadRequest(Builder builder) { + private CleanupBulkLoadRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private CleanupBulkLoadRequest(boolean noInit) {} - + private CleanupBulkLoadRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final CleanupBulkLoadRequest defaultInstance; public static CleanupBulkLoadRequest getDefaultInstance() { return defaultInstance; } - + public CleanupBulkLoadRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private CleanupBulkLoadRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + bulkToken_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_CleanupBulkLoadRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_CleanupBulkLoadRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_CleanupBulkLoadRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest.class, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public CleanupBulkLoadRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CleanupBulkLoadRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required string bulkToken = 1; public static final int BULKTOKEN_FIELD_NUMBER = 1; private java.lang.Object bulkToken_; + /** + * required string bulkToken = 1; + */ public boolean hasBulkToken() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getBulkToken() { + /** + * required string bulkToken = 1; + 
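The structural change behind all of these hunks: each message now parses itself in a private constructor driven by a CodedInputStream, recording unrecognized tags into an UnknownFieldSet, and PARSER is just a thin AbstractParser shim that invokes that constructor. A standalone skeleton of the tag loop (tag 10 is field 1 with wire type LENGTH_DELIMITED, matching "required string bulkToken = 1"):

    import com.google.protobuf.ByteString;
    import com.google.protobuf.CodedInputStream;
    import com.google.protobuf.InvalidProtocolBufferException;
    import com.google.protobuf.UnknownFieldSet;
    import java.io.IOException;

    // Read tags until 0, decode the ones we know, and funnel everything else
    // into an UnknownFieldSet that is built once parsing stops.
    final class TagLoopSketch {
      static UnknownFieldSet drain(CodedInputStream input)
          throws InvalidProtocolBufferException {
        UnknownFieldSet.Builder unknown = UnknownFieldSet.newBuilder();
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true; // end of message
                break;
              case 10: {
                ByteString bulkToken = input.readBytes(); // field value stored here
                break;
              }
              default:
                if (!unknown.mergeFieldFrom(tag, input)) {
                  done = true; // hit an end-group tag
                }
                break;
            }
          }
        } catch (InvalidProtocolBufferException e) {
          throw e;
        } catch (IOException e) {
          throw new InvalidProtocolBufferException(e.getMessage());
        }
        return unknown.build();
      }
    }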
*/ + public java.lang.String getBulkToken() { java.lang.Object ref = bulkToken_; - if (ref instanceof String) { - return (String) ref; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { bulkToken_ = s; } return s; } } - private com.google.protobuf.ByteString getBulkTokenBytes() { + /** + * required string bulkToken = 1; + */ + public com.google.protobuf.ByteString + getBulkTokenBytes() { java.lang.Object ref = bulkToken_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); bulkToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + private void initFields() { bulkToken_ = ""; } @@ -2842,7 +3632,7 @@ public final class SecureBulkLoadProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasBulkToken()) { memoizedIsInitialized = 0; return false; @@ -2850,7 +3640,7 @@ public final class SecureBulkLoadProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -2859,12 +3649,12 @@ public final class SecureBulkLoadProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -2874,14 +3664,14 @@ public final class SecureBulkLoadProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -2891,7 +3681,7 @@ public final class SecureBulkLoadProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest other = (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest) obj; - + boolean result = true; result = result && (hasBulkToken() == other.hasBulkToken()); if (hasBulkToken()) { @@ -2902,9 +3692,13 @@ public final class SecureBulkLoadProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasBulkToken()) { @@ -2912,89 +3706,79 @@ public final class SecureBulkLoadProtos { hash = (53 * hash) + getBulkToken().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return 
PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override 
protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code CleanupBulkLoadRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequestOrBuilder { @@ -3002,18 +3786,21 @@ public final class SecureBulkLoadProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_CleanupBulkLoadRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_CleanupBulkLoadRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_CleanupBulkLoadRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest.class, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -3024,27 +3811,27 @@ public final class SecureBulkLoadProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); bulkToken_ = ""; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_CleanupBulkLoadRequest_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest build() { org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest result = buildPartial(); if (!result.isInitialized()) { @@ -3052,17 +3839,7 @@ public final class SecureBulkLoadProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest result = new org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest(this); int 
from_bitField0_ = bitField0_; @@ -3075,7 +3852,7 @@ public final class SecureBulkLoadProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest)other); @@ -3084,16 +3861,18 @@ public final class SecureBulkLoadProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest.getDefaultInstance()) return this; if (other.hasBulkToken()) { - setBulkToken(other.getBulkToken()); + bitField0_ |= 0x00000001; + bulkToken_ = other.bulkToken_; + onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasBulkToken()) { @@ -3101,57 +3880,69 @@ public final class SecureBulkLoadProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - bulkToken_ = input.readBytes(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required string bulkToken = 1; private java.lang.Object bulkToken_ = ""; + /** + * required string bulkToken = 1; + */ public boolean hasBulkToken() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getBulkToken() { + /** + * required string bulkToken = 1; + */ + public java.lang.String getBulkToken() { java.lang.Object ref = bulkToken_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); bulkToken_ = s; return s; } else { - return (String) ref; + return (java.lang.String) ref; + } + } + /** + * required string bulkToken = 1; + */ + public com.google.protobuf.ByteString + getBulkTokenBytes() { + java.lang.Object ref = bulkToken_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + bulkToken_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; } } - public Builder setBulkToken(String value) { + /** + * required string bulkToken = 1; + */ + public Builder 
setBulkToken( + java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ -3160,95 +3951,167 @@ public final class SecureBulkLoadProtos { onChanged(); return this; } + /** + * required string bulkToken = 1; + */ public Builder clearBulkToken() { bitField0_ = (bitField0_ & ~0x00000001); bulkToken_ = getDefaultInstance().getBulkToken(); onChanged(); return this; } - void setBulkToken(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; + /** + * required string bulkToken = 1; + */ + public Builder setBulkTokenBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; bulkToken_ = value; onChanged(); + return this; } - + // @@protoc_insertion_point(builder_scope:CleanupBulkLoadRequest) } - + static { defaultInstance = new CleanupBulkLoadRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:CleanupBulkLoadRequest) } - + public interface CleanupBulkLoadResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code CleanupBulkLoadResponse} + */ public static final class CleanupBulkLoadResponse extends com.google.protobuf.GeneratedMessage implements CleanupBulkLoadResponseOrBuilder { // Use CleanupBulkLoadResponse.newBuilder() to construct. - private CleanupBulkLoadResponse(Builder builder) { + private CleanupBulkLoadResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private CleanupBulkLoadResponse(boolean noInit) {} - + private CleanupBulkLoadResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final CleanupBulkLoadResponse defaultInstance; public static CleanupBulkLoadResponse getDefaultInstance() { return defaultInstance; } - + public CleanupBulkLoadResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private CleanupBulkLoadResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_CleanupBulkLoadResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return 
org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_CleanupBulkLoadResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_CleanupBulkLoadResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse.class, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public CleanupBulkLoadResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CleanupBulkLoadResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -3258,101 +4121,95 @@ public final class SecureBulkLoadProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse other = (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse) obj; - + boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return 
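CleanupBulkLoadResponse carries no fields, so the hunks above are pure boilerplate churn; the practical win of the PARSER rewrite is that parsing becomes a first-class value that can be passed around, which the old builder-based parseFrom bodies could not. Sketch, again assuming the field is really Parser&lt;CleanupBulkLoadResponse&gt; (generics stripped in the diff text):

    import com.google.protobuf.InvalidProtocolBufferException;
    import com.google.protobuf.Parser;
    import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse;

    // A generic helper can now accept any message's parser.
    public final class ParserAsValue {
      static <T> T parse(Parser<T> parser, byte[] wire)
          throws InvalidProtocolBufferException {
        return parser.parseFrom(wire);
      }

      public static void main(String[] args) throws InvalidProtocolBufferException {
        byte[] wire = CleanupBulkLoadResponse.getDefaultInstance().toByteArray();
        CleanupBulkLoadResponse msg = parse(CleanupBulkLoadResponse.PARSER, wire);
        System.out.println(msg.isInitialized()); // true: no required fields
      }
    }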
newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code CleanupBulkLoadResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponseOrBuilder { @@ -3360,18 +4217,21 @@ public final class SecureBulkLoadProtos { getDescriptor() { return 
org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_CleanupBulkLoadResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_CleanupBulkLoadResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_CleanupBulkLoadResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse.class, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -3382,25 +4242,25 @@ public final class SecureBulkLoadProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_CleanupBulkLoadResponse_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse build() { org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse result = buildPartial(); if (!result.isInitialized()) { @@ -3408,23 +4268,13 @@ public final class SecureBulkLoadProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse result = new org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse(this); onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse)other); @@ -3433,78 +4283,80 @@ public final class SecureBulkLoadProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse other) { if (other == 
org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - - + // @@protoc_insertion_point(builder_scope:CleanupBulkLoadResponse) } - + static { defaultInstance = new CleanupBulkLoadResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:CleanupBulkLoadResponse) } - + + /** + * Protobuf service {@code SecureBulkLoadService} + */ public static abstract class SecureBulkLoadService implements com.google.protobuf.Service { protected SecureBulkLoadService() {} - + public interface Interface { + /** + * rpc prepareBulkLoad(.PrepareBulkLoadRequest) returns (.PrepareBulkLoadResponse); + */ public abstract void prepareBulkLoad( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc secureBulkLoadHFiles(.SecureBulkLoadHFilesRequest) returns (.SecureBulkLoadHFilesResponse); + */ public abstract void secureBulkLoadHFiles( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc cleanupBulkLoad(.CleanupBulkLoadRequest) returns (.CleanupBulkLoadResponse); + */ public abstract void cleanupBulkLoad( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest request, com.google.protobuf.RpcCallback done); - + } - + public static com.google.protobuf.Service newReflectiveService( final Interface impl) { return new SecureBulkLoadService() { @@ -3515,7 +4367,7 @@ public final class SecureBulkLoadProtos { com.google.protobuf.RpcCallback done) { impl.prepareBulkLoad(controller, request, done); } - + @java.lang.Override public void secureBulkLoadHFiles( com.google.protobuf.RpcController controller, @@ -3523,7 +4375,7 @@ public final class SecureBulkLoadProtos { com.google.protobuf.RpcCallback done) { impl.secureBulkLoadHFiles(controller, request, done); } - + @java.lang.Override public void cleanupBulkLoad( com.google.protobuf.RpcController controller, @@ -3531,10 +4383,10 @@ public final class SecureBulkLoadProtos { 
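For orientation, the SecureBulkLoadService scaffolding above follows the stock protoc service pattern: implement the async Interface, then wrap it with newReflectiveService to obtain a com.google.protobuf.Service. A server-side wiring sketch, assuming the RpcCallback parameters are generic as in stock protoc output (stripped in the diff text), and with illustrative response values only; real code would populate whatever required fields SecureBulkLoadHFilesResponse defines:

    import com.google.protobuf.RpcCallback;
    import com.google.protobuf.RpcController;
    import com.google.protobuf.Service;
    import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.*;

    public final class ServiceWiringSketch {
      static Service build() {
        SecureBulkLoadService.Interface impl = new SecureBulkLoadService.Interface() {
          @Override
          public void prepareBulkLoad(RpcController controller,
              PrepareBulkLoadRequest request,
              RpcCallback<PrepareBulkLoadResponse> done) {
            done.run(PrepareBulkLoadResponse.newBuilder()
                .setBulkToken("hypothetical-token") // illustrative value only
                .build());
          }

          @Override
          public void secureBulkLoadHFiles(RpcController controller,
              SecureBulkLoadHFilesRequest request,
              RpcCallback<SecureBulkLoadHFilesResponse> done) {
            done.run(SecureBulkLoadHFilesResponse.getDefaultInstance()); // sketch only
          }

          @Override
          public void cleanupBulkLoad(RpcController controller,
              CleanupBulkLoadRequest request,
              RpcCallback<CleanupBulkLoadResponse> done) {
            done.run(CleanupBulkLoadResponse.getDefaultInstance());
          }
        };
        return SecureBulkLoadService.newReflectiveService(impl);
      }
    }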
com.google.protobuf.RpcCallback done) { impl.cleanupBulkLoad(controller, request, done); } - + }; } - + public static com.google.protobuf.BlockingService newReflectiveBlockingService(final BlockingInterface impl) { return new com.google.protobuf.BlockingService() { @@ -3542,7 +4394,7 @@ public final class SecureBulkLoadProtos { getDescriptorForType() { return getDescriptor(); } - + public final com.google.protobuf.Message callBlockingMethod( com.google.protobuf.Descriptors.MethodDescriptor method, com.google.protobuf.RpcController controller, @@ -3564,7 +4416,7 @@ public final class SecureBulkLoadProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getRequestPrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -3584,7 +4436,7 @@ public final class SecureBulkLoadProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getResponsePrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -3604,25 +4456,34 @@ public final class SecureBulkLoadProtos { throw new java.lang.AssertionError("Can't get here."); } } - + }; } - + + /** + * rpc prepareBulkLoad(.PrepareBulkLoadRequest) returns (.PrepareBulkLoadResponse); + */ public abstract void prepareBulkLoad( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc secureBulkLoadHFiles(.SecureBulkLoadHFilesRequest) returns (.SecureBulkLoadHFilesResponse); + */ public abstract void secureBulkLoadHFiles( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc cleanupBulkLoad(.CleanupBulkLoadRequest) returns (.CleanupBulkLoadResponse); + */ public abstract void cleanupBulkLoad( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest request, com.google.protobuf.RpcCallback done); - + public static final com.google.protobuf.Descriptors.ServiceDescriptor getDescriptor() { @@ -3632,7 +4493,7 @@ public final class SecureBulkLoadProtos { getDescriptorForType() { return getDescriptor(); } - + public final void callMethod( com.google.protobuf.Descriptors.MethodDescriptor method, com.google.protobuf.RpcController controller, @@ -3664,7 +4525,7 @@ public final class SecureBulkLoadProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getRequestPrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -3684,7 +4545,7 @@ public final class SecureBulkLoadProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getResponsePrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -3704,23 +4565,23 @@ public final class SecureBulkLoadProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public static Stub newStub( com.google.protobuf.RpcChannel channel) { return new Stub(channel); } - + public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadService implements Interface { private Stub(com.google.protobuf.RpcChannel channel) { this.channel = channel; } - + private final com.google.protobuf.RpcChannel channel; - + public 
com.google.protobuf.RpcChannel getChannel() { return channel; } - + public void prepareBulkLoad( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest request, @@ -3735,7 +4596,7 @@ public final class SecureBulkLoadProtos { org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse.class, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse.getDefaultInstance())); } - + public void secureBulkLoadHFiles( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest request, @@ -3750,7 +4611,7 @@ public final class SecureBulkLoadProtos { org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.class, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.getDefaultInstance())); } - + public void cleanupBulkLoad( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest request, @@ -3766,36 +4627,36 @@ public final class SecureBulkLoadProtos { org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse.getDefaultInstance())); } } - + public static BlockingInterface newBlockingStub( com.google.protobuf.BlockingRpcChannel channel) { return new BlockingStub(channel); } - + public interface BlockingInterface { public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse prepareBulkLoad( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse secureBulkLoadHFiles( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse cleanupBulkLoad( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest request) throws com.google.protobuf.ServiceException; } - + private static final class BlockingStub implements BlockingInterface { private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { this.channel = channel; } - + private final com.google.protobuf.BlockingRpcChannel channel; - + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse prepareBulkLoad( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest request) @@ -3806,8 +4667,8 @@ public final class SecureBulkLoadProtos { request, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse secureBulkLoadHFiles( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest request) @@ -3818,8 +4679,8 @@ public final class SecureBulkLoadProtos { request, 
org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse cleanupBulkLoad( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest request) @@ -3830,10 +4691,12 @@ public final class SecureBulkLoadProtos { request, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse.getDefaultInstance()); } - + } + + // @@protoc_insertion_point(class_scope:SecureBulkLoadService) } - + private static com.google.protobuf.Descriptors.Descriptor internal_static_SecureBulkLoadHFilesRequest_descriptor; private static @@ -3869,7 +4732,7 @@ public final class SecureBulkLoadProtos { private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_CleanupBulkLoadResponse_fieldAccessorTable; - + public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; @@ -3911,57 +4774,43 @@ public final class SecureBulkLoadProtos { internal_static_SecureBulkLoadHFilesRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_SecureBulkLoadHFilesRequest_descriptor, - new java.lang.String[] { "FamilyPath", "AssignSeqNum", "FsToken", "BulkToken", }, - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest.class, - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest.Builder.class); + new java.lang.String[] { "FamilyPath", "AssignSeqNum", "FsToken", "BulkToken", }); internal_static_SecureBulkLoadHFilesResponse_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_SecureBulkLoadHFilesResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_SecureBulkLoadHFilesResponse_descriptor, - new java.lang.String[] { "Loaded", }, - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.class, - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.Builder.class); + new java.lang.String[] { "Loaded", }); internal_static_DelegationTokenProto_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_DelegationTokenProto_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_DelegationTokenProto_descriptor, - new java.lang.String[] { "Identifier", "Password", "Kind", "Service", }, - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.class, - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.Builder.class); + new java.lang.String[] { "Identifier", "Password", "Kind", "Service", }); internal_static_PrepareBulkLoadRequest_descriptor = getDescriptor().getMessageTypes().get(3); internal_static_PrepareBulkLoadRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_PrepareBulkLoadRequest_descriptor, - new java.lang.String[] { "TableName", }, - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest.class, - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest.Builder.class); + new java.lang.String[] { "TableName", }); internal_static_PrepareBulkLoadResponse_descriptor = getDescriptor().getMessageTypes().get(4); 
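At this point the descriptor assigner also changes shape: every FieldAccessorTable is now built from field names alone, and the message and builder classes are bound lazily by ensureFieldAccessorsInitialized inside each internalGetFieldAccessorTable (see the hunks earlier in this file). The apparent motivation is to decouple descriptor initialization from class loading, so forcing the file descriptor no longer drags in every generated message class. A small sketch of the cheap side of that split (index 4 is PrepareBulkLoadResponse, per the assignment just above):

    import com.google.protobuf.Descriptors.FileDescriptor;
    import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos;

    public class LazyAccessorSketch {
      public static void main(String[] args) {
        // Initializes descriptors and the names-only accessor tables;
        // reflective class/builder binding is deferred until a message
        // actually calls internalGetFieldAccessorTable().
        FileDescriptor fd = SecureBulkLoadProtos.getDescriptor();
        System.out.println(fd.getMessageTypes().get(4).getName());
        // prints: PrepareBulkLoadResponse
      }
    }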
internal_static_PrepareBulkLoadResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_PrepareBulkLoadResponse_descriptor, - new java.lang.String[] { "BulkToken", }, - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse.class, - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse.Builder.class); + new java.lang.String[] { "BulkToken", }); internal_static_CleanupBulkLoadRequest_descriptor = getDescriptor().getMessageTypes().get(5); internal_static_CleanupBulkLoadRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_CleanupBulkLoadRequest_descriptor, - new java.lang.String[] { "BulkToken", }, - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest.class, - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest.Builder.class); + new java.lang.String[] { "BulkToken", }); internal_static_CleanupBulkLoadResponse_descriptor = getDescriptor().getMessageTypes().get(6); internal_static_CleanupBulkLoadResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_CleanupBulkLoadResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse.class, - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse.Builder.class); + new java.lang.String[] { }); return null; } }; @@ -3972,6 +4821,6 @@ public final class SecureBulkLoadProtos { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor(), }, assigner); } - + // @@protoc_insertion_point(outer_class_scope) } diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/Tracing.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/Tracing.java index e634f17..4d4bdcd 100644 --- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/Tracing.java +++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/Tracing.java @@ -10,64 +10,168 @@ public final class Tracing { } public interface RPCTInfoOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // optional int64 traceId = 1; + /** + * optional int64 traceId = 1; + */ boolean hasTraceId(); + /** + * optional int64 traceId = 1; + */ long getTraceId(); - + // optional int64 parentId = 2; + /** + * optional int64 parentId = 2; + */ boolean hasParentId(); + /** + * optional int64 parentId = 2; + */ long getParentId(); } + /** + * Protobuf type {@code RPCTInfo} + * + *
+   *Used to pass through the information necessary to continue
+   *a trace after an RPC is made. All we need is the traceid 
+   *(so we know the overarching trace this message is a part of), and
+   *the id of the current span when this message was sent, so we know 
+   *what span caused the new span we will create when this message is received.
+   * </pre>
+ */ public static final class RPCTInfo extends com.google.protobuf.GeneratedMessage implements RPCTInfoOrBuilder { // Use RPCTInfo.newBuilder() to construct. - private RPCTInfo(Builder builder) { + private RPCTInfo(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private RPCTInfo(boolean noInit) {} - + private RPCTInfo(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final RPCTInfo defaultInstance; public static RPCTInfo getDefaultInstance() { return defaultInstance; } - + public RPCTInfo getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private RPCTInfo( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + traceId_ = input.readInt64(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + parentId_ = input.readInt64(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.Tracing.internal_static_RPCTInfo_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.Tracing.internal_static_RPCTInfo_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.Tracing.internal_static_RPCTInfo_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.class, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder.class); } - + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public RPCTInfo parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RPCTInfo(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + private int bitField0_; // optional int64 traceId = 1; public static final int TRACEID_FIELD_NUMBER = 1; private long traceId_; + /** + * optional int64 traceId = 1; + */ public boolean hasTraceId() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional int64 traceId = 1; + */ public long getTraceId() { return traceId_; } - + // optional int64 parentId = 2; public 
static final int PARENTID_FIELD_NUMBER = 2; private long parentId_; + /** + * optional int64 parentId = 2; + */ public boolean hasParentId() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional int64 parentId = 2; + */ public long getParentId() { return parentId_; } - + private void initFields() { traceId_ = 0L; parentId_ = 0L; @@ -76,11 +180,11 @@ public final class Tracing { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -92,12 +196,12 @@ public final class Tracing { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -111,14 +215,14 @@ public final class Tracing { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -128,7 +232,7 @@ public final class Tracing { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo other = (org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo) obj; - + boolean result = true; result = result && (hasTraceId() == other.hasTraceId()); if (hasTraceId()) { @@ -144,9 +248,13 @@ public final class Tracing { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasTraceId()) { @@ -158,89 +266,87 @@ public final class Tracing { hash = (53 * hash) + hashLong(getParentId()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static 
org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code RPCTInfo} + * + *
+     *Used to pass through the information necessary to continue
+     *a trace after an RPC is made. All we need is the traceid 
+     *(so we know the overarching trace this message is a part of), and
+     *the id of the current span when this message was sent, so we know 
+     *what span caused the new span we will create when this message is received.
+     * </pre>
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder { @@ -248,18 +354,21 @@ public final class Tracing { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.Tracing.internal_static_RPCTInfo_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.Tracing.internal_static_RPCTInfo_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.Tracing.internal_static_RPCTInfo_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.class, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -270,7 +379,7 @@ public final class Tracing { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); traceId_ = 0L; @@ -279,20 +388,20 @@ public final class Tracing { bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.Tracing.internal_static_RPCTInfo_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo build() { org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo result = buildPartial(); if (!result.isInitialized()) { @@ -300,17 +409,7 @@ public final class Tracing { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo buildPartial() { org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo result = new org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo(this); int from_bitField0_ = bitField0_; @@ -327,7 +426,7 @@ public final class Tracing { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo)other); @@ -336,7 +435,7 @@ public final class Tracing { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo other) { if (other == org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDefaultInstance()) return this; if (other.hasTraceId()) { @@ -348,109 +447,113 @@ public final class Tracing { 
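Tracing.RPCTInfo receives the same mechanical conversion as the messages above: a parse-in-constructor, a public static PARSER, parseFrom overloads that delegate to it, and a parseDelimitedFrom that still returns null at a clean end of stream (that check now lives in protobuf's AbstractParser). Since this message exists to hand trace context across an RPC, a hypothetical round trip looks like this; the two ID values are invented:

    import org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo;

    public class RpcTraceContextSketch {
      public static void main(String[] args) throws Exception {
        // Sender: record the overarching trace and the current span.
        RPCTInfo sent = RPCTInfo.newBuilder()
            .setTraceId(42L)     // invented trace id
            .setParentId(7L)     // invented id of the span issuing the RPC
            .build();
        byte[] header = sent.toByteArray();

        // Receiver: the static parser replaces newBuilder()...buildParsed().
        RPCTInfo received = RPCTInfo.PARSER.parseFrom(header);
        if (received.hasTraceId() && received.hasParentId()) {
          System.out.println("new span parented under " + received.getParentId()
              + " in trace " + received.getTraceId());
        }
      }
    }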
this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - traceId_ = input.readInt64(); - break; - } - case 16: { - bitField0_ |= 0x00000002; - parentId_ = input.readInt64(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // optional int64 traceId = 1; private long traceId_ ; + /** + * optional int64 traceId = 1; + */ public boolean hasTraceId() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional int64 traceId = 1; + */ public long getTraceId() { return traceId_; } + /** + * optional int64 traceId = 1; + */ public Builder setTraceId(long value) { bitField0_ |= 0x00000001; traceId_ = value; onChanged(); return this; } + /** + * optional int64 traceId = 1; + */ public Builder clearTraceId() { bitField0_ = (bitField0_ & ~0x00000001); traceId_ = 0L; onChanged(); return this; } - + // optional int64 parentId = 2; private long parentId_ ; + /** + * optional int64 parentId = 2; + */ public boolean hasParentId() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional int64 parentId = 2; + */ public long getParentId() { return parentId_; } + /** + * optional int64 parentId = 2; + */ public Builder setParentId(long value) { bitField0_ |= 0x00000002; parentId_ = value; onChanged(); return this; } + /** + * optional int64 parentId = 2; + */ public Builder clearParentId() { bitField0_ = (bitField0_ & ~0x00000002); parentId_ = 0L; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:RPCTInfo) } - + static { defaultInstance = new RPCTInfo(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:RPCTInfo) } - + private static com.google.protobuf.Descriptors.Descriptor internal_static_RPCTInfo_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_RPCTInfo_fieldAccessorTable; - + public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; @@ -474,9 +577,7 @@ public final class Tracing { internal_static_RPCTInfo_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_RPCTInfo_descriptor, - new java.lang.String[] { "TraceId", "ParentId", }, - org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.class, - org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder.class); + new java.lang.String[] { "TraceId", "ParentId", }); return 
null; } }; @@ -485,6 +586,6 @@ public final class Tracing { new com.google.protobuf.Descriptors.FileDescriptor[] { }, assigner); } - + // @@protoc_insertion_point(outer_class_scope) } diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.java index 04b094a..9d7d413 100644 --- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.java +++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.java @@ -10,54 +10,176 @@ public final class ZooKeeperProtos { } public interface RootRegionServerOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .ServerName server = 1; + /** + * required .ServerName server = 1; + * + *
+     * The ServerName hosting the root region currently.
+     * </pre>
+ */ boolean hasServer(); + /** + * required .ServerName server = 1; + * + *
+     * The ServerName hosting the root region currently.
+     * </pre>
+ */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer(); + /** + * required .ServerName server = 1; + * + *
+     * The ServerName hosting the root region currently.
+     * </pre>
+ */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder(); } + /** + * Protobuf type {@code RootRegionServer} + * + *
+   **
+   * Content of the root-region-server znode.
+   * </pre>
+ */ public static final class RootRegionServer extends com.google.protobuf.GeneratedMessage implements RootRegionServerOrBuilder { // Use RootRegionServer.newBuilder() to construct. - private RootRegionServer(Builder builder) { + private RootRegionServer(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private RootRegionServer(boolean noInit) {} - + private RootRegionServer(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final RootRegionServer defaultInstance; public static RootRegionServer getDefaultInstance() { return defaultInstance; } - + public RootRegionServer getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private RootRegionServer( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = server_.toBuilder(); + } + server_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(server_); + server_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_RootRegionServer_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_RootRegionServer_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_RootRegionServer_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer.class, org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public RootRegionServer parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RootRegionServer(input, extensionRegistry); + } + }; + + 
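One detail worth calling out in the new parsing constructor above: if the wire data carries the singular server field more than once (tag 10), the previous value is turned back into a builder and the occurrences are merged field by field, which matches the protobuf merge semantics for embedded messages. A hypothetical round trip for this znode payload; host name, port and start code are invented values:

    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName;
    import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer;

    public class RootZnodeSketch {
      public static void main(String[] args) throws Exception {
        RootRegionServer written = RootRegionServer.newBuilder()
            .setServer(ServerName.newBuilder()
                .setHostName("rs1.example.org")  // invented
                .setPort(60020)                  // invented
                .setStartCode(1234567890L))      // invented
            .build();
        byte[] znodeData = written.toByteArray();

        // Read back through the new static parser. isInitialized() still
        // enforces the required server field, so parseFrom fails fast on
        // payloads that lack it.
        RootRegionServer read = RootRegionServer.PARSER.parseFrom(znodeData);
        System.out.println(read.getServer().getHostName());
      }
    }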
@java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required .ServerName server = 1; public static final int SERVER_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName server_; + /** + * required .ServerName server = 1; + * + *
+     * The ServerName hosting the root region currently.
+     * </pre>
+ */ public boolean hasServer() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .ServerName server = 1; + * + *
+     * The ServerName hosting the root region currently.
+     * </pre>
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer() { return server_; } + /** + * required .ServerName server = 1; + * + *
+     * The ServerName hosting the root region currently.
+     * </pre>
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() { return server_; } - + private void initFields() { server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); } @@ -65,7 +187,7 @@ public final class ZooKeeperProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasServer()) { memoizedIsInitialized = 0; return false; @@ -77,7 +199,7 @@ public final class ZooKeeperProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -86,12 +208,12 @@ public final class ZooKeeperProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -101,14 +223,14 @@ public final class ZooKeeperProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -118,7 +240,7 @@ public final class ZooKeeperProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer other = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer) obj; - + boolean result = true; result = result && (hasServer() == other.hasServer()); if (hasServer()) { @@ -129,9 +251,13 @@ public final class ZooKeeperProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasServer()) { @@ -139,89 +265,84 @@ public final class ZooKeeperProtos { hash = (53 * hash) + getServer().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code RootRegionServer} + * + *
+     **
+     * Content of the root-region-server znode.
+     * </pre>
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServerOrBuilder { @@ -229,18 +350,21 @@ public final class ZooKeeperProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_RootRegionServer_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_RootRegionServer_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_RootRegionServer_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer.class, org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -252,7 +376,7 @@ public final class ZooKeeperProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (serverBuilder_ == null) { @@ -263,20 +387,20 @@ public final class ZooKeeperProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_RootRegionServer_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer build() { org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer result = buildPartial(); if (!result.isInitialized()) { @@ -284,17 +408,7 @@ public final class ZooKeeperProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer result = new org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer(this); int from_bitField0_ = bitField0_; @@ -311,7 +425,7 @@ public final class ZooKeeperProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer) { return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer)other); @@ -320,7 +434,7 @@ public final class ZooKeeperProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer other) { if (other == org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer.getDefaultInstance()) return this; if (other.hasServer()) { @@ -329,7 +443,7 @@ public final class ZooKeeperProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasServer()) { @@ -341,52 +455,47 @@ public final class ZooKeeperProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(); - if (hasServer()) { - subBuilder.mergeFrom(getServer()); - } - input.readMessage(subBuilder, extensionRegistry); - setServer(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .ServerName server = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverBuilder_; + /** + * required .ServerName server = 1; + * + *
+       * The ServerName hosting the root region currently.
+       * </pre>
+ */ public boolean hasServer() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .ServerName server = 1; + * + *
+       * The ServerName hosting the root region currently.
+       * </pre>
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer() { if (serverBuilder_ == null) { return server_; @@ -394,6 +503,13 @@ public final class ZooKeeperProtos { return serverBuilder_.getMessage(); } } + /** + * required .ServerName server = 1; + * + *
+       * The ServerName hosting the root region currently.
+       * </pre>
+ */ public Builder setServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { if (serverBuilder_ == null) { if (value == null) { @@ -407,6 +523,13 @@ public final class ZooKeeperProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .ServerName server = 1; + * + *
+       * The ServerName hosting the root region currently.
+       * </pre>
+ */ public Builder setServer( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { if (serverBuilder_ == null) { @@ -418,6 +541,13 @@ public final class ZooKeeperProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .ServerName server = 1; + * + *
+       * The ServerName hosting the root region currently.
+       * </pre>
+ */ public Builder mergeServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { if (serverBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -434,6 +564,13 @@ public final class ZooKeeperProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .ServerName server = 1; + * + *
+       * The ServerName hosting the root region currently.
+       * </pre>
+ */ public Builder clearServer() { if (serverBuilder_ == null) { server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); @@ -444,11 +581,25 @@ public final class ZooKeeperProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * required .ServerName server = 1; + * + *
+       * The ServerName hosting the root region currently.
+       * </pre>
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getServerBuilder() { bitField0_ |= 0x00000001; onChanged(); return getServerFieldBuilder().getBuilder(); } + /** + * required .ServerName server = 1; + * + *
+       * The ServerName hosting the root region currently.
+       * </pre>
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() { if (serverBuilder_ != null) { return serverBuilder_.getMessageOrBuilder(); @@ -456,6 +607,13 @@ public final class ZooKeeperProtos { return server_; } } + /** + * required .ServerName server = 1; + * + *
+       * The ServerName hosting the root region currently.
+       * </pre>
+ */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getServerFieldBuilder() { @@ -469,68 +627,190 @@ public final class ZooKeeperProtos { } return serverBuilder_; } - + // @@protoc_insertion_point(builder_scope:RootRegionServer) } - + static { defaultInstance = new RootRegionServer(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:RootRegionServer) } - + public interface MasterOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .ServerName master = 1; + /** + * required .ServerName master = 1; + * + *
+     * The ServerName of the current Master
+     * </pre>
+ */ boolean hasMaster(); + /** + * required .ServerName master = 1; + * + *
+     * The ServerName of the current Master
+     * </pre>
+ */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getMaster(); + /** + * required .ServerName master = 1; + * + *
+     * The ServerName of the current Master
+     * </pre>
+ */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getMasterOrBuilder(); } + /** + * Protobuf type {@code Master} + * + *
+   **
+   * Content of the master znode.
+   * </pre>
+ */ public static final class Master extends com.google.protobuf.GeneratedMessage implements MasterOrBuilder { // Use Master.newBuilder() to construct. - private Master(Builder builder) { + private Master(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private Master(boolean noInit) {} - + private Master(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final Master defaultInstance; public static Master getDefaultInstance() { return defaultInstance; } - + public Master getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private Master( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = master_.toBuilder(); + } + master_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(master_); + master_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_Master_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_Master_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_Master_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master.class, org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public Master parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new Master(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required .ServerName master = 1; public static final int 
MASTER_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName master_; + /** + * <code>required .ServerName master = 1;</code> + * + * <pre>
+     * The ServerName of the current Master
+     * </pre>
+ */ public boolean hasMaster() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * <code>required .ServerName master = 1;</code> + * + * <pre>
+     * The ServerName of the current Master
+     * </pre>
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getMaster() { return master_; } + /** + * <code>required .ServerName master = 1;</code> + * + * <pre>
+     * The ServerName of the current Master
+     * </pre>
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getMasterOrBuilder() { return master_; } - + private void initFields() { master_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); } @@ -538,7 +818,7 @@ public final class ZooKeeperProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasMaster()) { memoizedIsInitialized = 0; return false; @@ -550,7 +830,7 @@ public final class ZooKeeperProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -559,12 +839,12 @@ public final class ZooKeeperProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -574,14 +854,14 @@ public final class ZooKeeperProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -591,7 +871,7 @@ public final class ZooKeeperProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master other = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master) obj; - + boolean result = true; result = result && (hasMaster() == other.hasMaster()); if (hasMaster()) { @@ -602,9 +882,13 @@ public final class ZooKeeperProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasMaster()) { @@ -612,89 +896,84 @@ public final class ZooKeeperProtos { hash = (53 * hash) + getMaster().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, 
extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code Master} + * + *
+     * <pre>
+     **
+     * Content of the master znode.
+     * </pre>
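Net effect of the Master hunks above: every static parseFrom overload now delegates to the shared PARSER instead of routing through a throwaway Builder and the removed private buildParsed(), and decoding happens once, in a streaming constructor that also captures unknown fields. A minimal caller-side sketch, assuming the generated Parser<Master> declaration, with a byte[] standing in for a znode payload whose magic prefix has already been stripped:

import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master;

public final class MasterParseSketch {
  static Master decode(byte[] data) throws InvalidProtocolBufferException {
    // Public entry point is unchanged; it now routes through Master.PARSER.
    Master viaStatic = Master.parseFrom(data);
    // The parser object itself is reusable, handy for generic plumbing.
    Master viaParser = Master.PARSER.parseFrom(data);
    assert viaStatic.equals(viaParser);
    return viaStatic;
  }
}

Because master is a required field, both paths still throw InvalidProtocolBufferException on an uninitialized message, which matches what the old buildParsed() did.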
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.MasterOrBuilder { @@ -702,18 +981,21 @@ public final class ZooKeeperProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_Master_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_Master_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_Master_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master.class, org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -725,7 +1007,7 @@ public final class ZooKeeperProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); if (masterBuilder_ == null) { @@ -736,20 +1018,20 @@ public final class ZooKeeperProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_Master_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master build() { org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master result = buildPartial(); if (!result.isInitialized()) { @@ -757,17 +1039,7 @@ public final class ZooKeeperProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master result = new org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master(this); int from_bitField0_ = bitField0_; @@ -784,7 +1056,7 @@ public final class ZooKeeperProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master)other); @@ -793,7 +1065,7 @@ public final class ZooKeeperProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master other) { if 
(other == org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master.getDefaultInstance()) return this; if (other.hasMaster()) { @@ -802,7 +1074,7 @@ public final class ZooKeeperProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasMaster()) { @@ -814,52 +1086,47 @@ public final class ZooKeeperProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(); - if (hasMaster()) { - subBuilder.mergeFrom(getMaster()); - } - input.readMessage(subBuilder, extensionRegistry); - setMaster(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .ServerName master = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName master_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> masterBuilder_; + /** + * required .ServerName master = 1; + * + *
+       * <pre>
+       * The ServerName of the current Master
+       * </pre>
+ */ public boolean hasMaster() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * <code>required .ServerName master = 1;</code> + * + * <pre>
+       * The ServerName of the current Master
+       * </pre>
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getMaster() { if (masterBuilder_ == null) { return master_; @@ -867,6 +1134,13 @@ public final class ZooKeeperProtos { return masterBuilder_.getMessage(); } } + /** + * required .ServerName master = 1; + * + *
+       * <pre>
+       * The ServerName of the current Master
+       * </pre>
+ */ public Builder setMaster(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { if (masterBuilder_ == null) { if (value == null) { @@ -880,6 +1154,13 @@ public final class ZooKeeperProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .ServerName master = 1; + * + *
+       * <pre>
+       * The ServerName of the current Master
+       * </pre>
+ */ public Builder setMaster( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { if (masterBuilder_ == null) { @@ -891,6 +1172,13 @@ public final class ZooKeeperProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .ServerName master = 1; + * + *
+       * <pre>
+       * The ServerName of the current Master
+       * </pre>
+ */ public Builder mergeMaster(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { if (masterBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && @@ -907,6 +1195,13 @@ public final class ZooKeeperProtos { bitField0_ |= 0x00000001; return this; } + /** + * required .ServerName master = 1; + * + *
+       * <pre>
+       * The ServerName of the current Master
+       * </pre>
+ */ public Builder clearMaster() { if (masterBuilder_ == null) { master_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); @@ -917,11 +1212,25 @@ public final class ZooKeeperProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } + /** + * required .ServerName master = 1; + * + *
+       * <pre>
+       * The ServerName of the current Master
+       * </pre>
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getMasterBuilder() { bitField0_ |= 0x00000001; onChanged(); return getMasterFieldBuilder().getBuilder(); } + /** + * <code>required .ServerName master = 1;</code> + * + * <pre>
+       * The ServerName of the current Master
+       * </pre>
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getMasterOrBuilder() { if (masterBuilder_ != null) { return masterBuilder_.getMessageOrBuilder(); @@ -929,6 +1238,13 @@ public final class ZooKeeperProtos { return master_; } } + /** + * required .ServerName master = 1; + * + *
+       * <pre>
+       * The ServerName of the current Master
+       * </pre>
+ */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getMasterFieldBuilder() { @@ -942,86 +1258,210 @@ public final class ZooKeeperProtos { } return masterBuilder_; } - + // @@protoc_insertion_point(builder_scope:Master) } - + static { defaultInstance = new Master(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:Master) } - + public interface ClusterUpOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required string startDate = 1; + /** + * required string startDate = 1; + * + *
+     * <pre>
+     * If this znode is present, cluster is up.  Currently
+     * the data is cluster startDate.
+     * </pre>
+ */ boolean hasStartDate(); - String getStartDate(); + /** + * <code>required string startDate = 1;</code> + * + * <pre>
+     * If this znode is present, cluster is up.  Currently
+     * the data is cluster startDate.
+     * </pre>
+ */ + java.lang.String getStartDate(); + /** + * <code>required string startDate = 1;</code> + * + * <pre>
+     * If this znode is present, cluster is up.  Currently
+     * the data is cluster startDate.
+     * </pre>
+ */ + com.google.protobuf.ByteString + getStartDateBytes(); } + /** + * Protobuf type {@code ClusterUp} + * + * <pre>
+   **
+   * Content of the '/hbase/running', cluster state, znode.
+   * </pre>
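ClusterUp gets the same regeneration. One contract worth noting: the old parseDelimitedFrom returned null at end-of-stream, and protobuf 2.5's AbstractParser keeps that behavior, so callers that loop over a stream of delimited messages need no changes. A small round-trip sketch under that assumption (class name and start-date value are illustrative):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp;

public final class ClusterUpRoundTrip {
  public static void main(String[] args) throws IOException {
    ClusterUp up = ClusterUp.newBuilder().setStartDate("1369419600000").build();
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    up.writeDelimitedTo(out);                          // length-prefixed frame
    InputStream in = new ByteArrayInputStream(out.toByteArray());
    ClusterUp back = ClusterUp.parseDelimitedFrom(in); // now PARSER-backed
    System.out.println(back.getStartDate());
    System.out.println(ClusterUp.parseDelimitedFrom(in) == null); // true at EOF
  }
}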
+ */ public static final class ClusterUp extends com.google.protobuf.GeneratedMessage implements ClusterUpOrBuilder { // Use ClusterUp.newBuilder() to construct. - private ClusterUp(Builder builder) { + private ClusterUp(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private ClusterUp(boolean noInit) {} - + private ClusterUp(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final ClusterUp defaultInstance; public static ClusterUp getDefaultInstance() { return defaultInstance; } - + public ClusterUp getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ClusterUp( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + startDate_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ClusterUp_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ClusterUp_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ClusterUp_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp.class, org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public ClusterUp parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ClusterUp(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required string startDate = 1; public static final int STARTDATE_FIELD_NUMBER = 1; private java.lang.Object startDate_; + /** + * required string startDate = 1; + * + *
+     * <pre>
+     * If this znode is present, cluster is up.  Currently
+     * the data is cluster startDate.
+     * </pre>
+ */ public boolean hasStartDate() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getStartDate() { + /** + * <code>required string startDate = 1;</code> + * + * <pre>
+     * If this znode is present, cluster is up.  Currently
+     * the data is cluster startDate.
+     * </pre>
+ */ + public java.lang.String getStartDate() { java.lang.Object ref = startDate_; - if (ref instanceof String) { - return (String) ref; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { startDate_ = s; } return s; } } - private com.google.protobuf.ByteString getStartDateBytes() { + /** + * required string startDate = 1; + * + *
+     * <pre>
+     * If this znode is present, cluster is up.  Currently
+     * the data is cluster startDate.
+     * </pre>
+ */ + public com.google.protobuf.ByteString + getStartDateBytes() { java.lang.Object ref = startDate_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); startDate_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + private void initFields() { startDate_ = ""; } @@ -1029,7 +1469,7 @@ public final class ZooKeeperProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasStartDate()) { memoizedIsInitialized = 0; return false; @@ -1037,7 +1477,7 @@ public final class ZooKeeperProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -1046,12 +1486,12 @@ public final class ZooKeeperProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -1061,14 +1501,14 @@ public final class ZooKeeperProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -1078,7 +1518,7 @@ public final class ZooKeeperProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp other = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp) obj; - + boolean result = true; result = result && (hasStartDate() == other.hasStartDate()); if (hasStartDate()) { @@ -1089,9 +1529,13 @@ public final class ZooKeeperProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasStartDate()) { @@ -1099,89 +1543,84 @@ public final class ZooKeeperProtos { hash = (53 * hash) + getStartDate().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public 
static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code ClusterUp} + * + *
+     * <pre>
+     **
+     * Content of the '/hbase/running', cluster state, znode.
+     * </pre>
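In the regenerated Builder that follows, mergeFrom(ClusterUp) no longer calls setStartDate(other.getStartDate()); it copies the raw startDate_ reference, which may still be a ByteString, so merging no longer forces an eager UTF-8 decode. A stripped-down sketch of that lazy-string pattern (the field and accessor names mirror the generated code; the enclosing class is illustrative):

import com.google.protobuf.ByteString;

final class LazyStringFieldSketch {
  // Holds either a java.lang.String or a ByteString, like startDate_.
  private Object startDate_ = "";

  void mergeFrom(LazyStringFieldSketch other) {
    startDate_ = other.startDate_;   // no toStringUtf8() here
  }

  String getStartDate() {
    Object ref = startDate_;
    if (ref instanceof String) {
      return (String) ref;
    }
    String s = ((ByteString) ref).toStringUtf8();
    startDate_ = s;                  // memoize the decoded form
    return s;
  }
}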
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUpOrBuilder { @@ -1189,18 +1628,21 @@ public final class ZooKeeperProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ClusterUp_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ClusterUp_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ClusterUp_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp.class, org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -1211,27 +1653,27 @@ public final class ZooKeeperProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); startDate_ = ""; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ClusterUp_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp build() { org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp result = buildPartial(); if (!result.isInitialized()) { @@ -1239,17 +1681,7 @@ public final class ZooKeeperProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp result = new org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp(this); int from_bitField0_ = bitField0_; @@ -1262,7 +1694,7 @@ public final class ZooKeeperProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp)other); @@ -1271,16 +1703,18 @@ public final class ZooKeeperProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp other) { if 
(other == org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp.getDefaultInstance()) return this; if (other.hasStartDate()) { - setStartDate(other.getStartDate()); + bitField0_ |= 0x00000001; + startDate_ = other.startDate_; + onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasStartDate()) { @@ -1288,57 +1722,89 @@ public final class ZooKeeperProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - startDate_ = input.readBytes(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required string startDate = 1; private java.lang.Object startDate_ = ""; + /** + * required string startDate = 1; + * + *
+       * <pre>
+       * If this znode is present, cluster is up.  Currently
+       * the data is cluster startDate.
+       * </pre>
+ */ public boolean hasStartDate() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getStartDate() { + /** + * <code>required string startDate = 1;</code> + * + * <pre>
+       * If this znode is present, cluster is up.  Currently
+       * the data is cluster startDate.
+       * </pre>
+ */ + public java.lang.String getStartDate() { java.lang.Object ref = startDate_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); startDate_ = s; return s; } else { - return (String) ref; - } - } - public Builder setStartDate(String value) { + return (java.lang.String) ref; + } + } + /** + * required string startDate = 1; + * + *
+       * <pre>
+       * If this znode is present, cluster is up.  Currently
+       * the data is cluster startDate.
+       * </pre>
+ */ + public com.google.protobuf.ByteString + getStartDateBytes() { + java.lang.Object ref = startDate_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + startDate_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * required string startDate = 1; + * + *
+       * <pre>
+       * If this znode is present, cluster is up.  Currently
+       * the data is cluster startDate.
+       * </pre>
+ */ + public Builder setStartDate( + java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ -1347,135 +1813,379 @@ public final class ZooKeeperProtos { onChanged(); return this; } + /** + * required string startDate = 1; + * + *
+       * <pre>
+       * If this znode is present, cluster is up.  Currently
+       * the data is cluster startDate.
+       * </pre>
+ */ public Builder clearStartDate() { bitField0_ = (bitField0_ & ~0x00000001); startDate_ = getDefaultInstance().getStartDate(); onChanged(); return this; } - void setStartDate(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; + /** + * <code>required string startDate = 1;</code> + * + * <pre>
+       * If this znode is present, cluster is up.  Currently
+       * the data is cluster startDate.
+       * </pre>
+ */ + public Builder setStartDateBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; startDate_ = value; onChanged(); + return this; } - + // @@protoc_insertion_point(builder_scope:ClusterUp) } - + static { defaultInstance = new ClusterUp(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:ClusterUp) } - + public interface RegionTransitionOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required uint32 eventTypeCode = 1; + /** + * required uint32 eventTypeCode = 1; + * + *
+     * <pre>
+     * Code for EventType gotten by doing o.a.h.h.EventHandler.EventType.getCode()
+     * </pre>
+ */ boolean hasEventTypeCode(); + /** + * <code>required uint32 eventTypeCode = 1;</code> + * + * <pre>
+     * Code for EventType gotten by doing o.a.h.h.EventHandler.EventType.getCode()
+     * </pre>
+ */ int getEventTypeCode(); - + // required bytes regionName = 2; + /** + * <code>required bytes regionName = 2;</code> + * + * <pre>
+     * Full regionname in bytes
+     * </pre>
+ */ boolean hasRegionName(); + /** + * <code>required bytes regionName = 2;</code> + * + * <pre>
+     * Full regionname in bytes
+     * </pre>
+ */ com.google.protobuf.ByteString getRegionName(); - + // required uint64 createTime = 3; + /** + * <code>required uint64 createTime = 3;</code> + */ boolean hasCreateTime(); + /** + * <code>required uint64 createTime = 3;</code> + */ long getCreateTime(); - + // required .ServerName serverName = 4; + /** + * <code>required .ServerName serverName = 4;</code> + * + * <pre>
+     * The region server where the transition will happen or is happening
+     * </pre>
+ */ boolean hasServerName(); + /** + * <code>required .ServerName serverName = 4;</code> + * + * <pre>
+     * The region server where the transition will happen or is happening
+     * </pre>
+ */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName(); + /** + * <code>required .ServerName serverName = 4;</code> + * + * <pre>
+     * The region server where the transition will happen or is happening
+     * </pre>
+ */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder(); - + // optional bytes payload = 5; + /** + * <code>optional bytes payload = 5;</code> + */ boolean hasPayload(); + /** + * <code>optional bytes payload = 5;</code> + */ com.google.protobuf.ByteString getPayload(); } + /** + * Protobuf type {@code RegionTransition} + * + * <pre>
+   **
+   * What we write under unassigned up in zookeeper as a region moves through
+   * open/close, etc., regions.  Details a region in transition.
+   * </pre>
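RegionTransition follows the same template: a one-pass parsing constructor fills all five fields and stashes unrecognized tags in an immutable UnknownFieldSet, and a shared PARSER backs every parseFrom overload. A hypothetical end-to-end sketch (the event code and server values are made up; the setters are the generated ones):

import com.google.protobuf.ByteString;
import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName;
import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition;

public final class RegionTransitionSketch {
  public static void main(String[] args) throws InvalidProtocolBufferException {
    RegionTransition rt = RegionTransition.newBuilder()
        .setEventTypeCode(42)  // placeholder; real callers use EventType.getCode()
        .setRegionName(ByteString.copyFromUtf8("example-region-name"))
        .setCreateTime(System.currentTimeMillis())
        .setServerName(ServerName.newBuilder()
            .setHostName("rs1.example.org").setPort(60020).setStartCode(1L))
        .build();
    RegionTransition parsed = RegionTransition.PARSER.parseFrom(rt.toByteArray());
    System.out.println(parsed.getServerName().getHostName());
  }
}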
+ */ public static final class RegionTransition extends com.google.protobuf.GeneratedMessage implements RegionTransitionOrBuilder { // Use RegionTransition.newBuilder() to construct. - private RegionTransition(Builder builder) { + private RegionTransition(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private RegionTransition(boolean noInit) {} - + private RegionTransition(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final RegionTransition defaultInstance; public static RegionTransition getDefaultInstance() { return defaultInstance; } - + public RegionTransition getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private RegionTransition( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + eventTypeCode_ = input.readUInt32(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + regionName_ = input.readBytes(); + break; + } + case 24: { + bitField0_ |= 0x00000004; + createTime_ = input.readUInt64(); + break; + } + case 34: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = null; + if (((bitField0_ & 0x00000008) == 0x00000008)) { + subBuilder = serverName_.toBuilder(); + } + serverName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(serverName_); + serverName_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000008; + break; + } + case 42: { + bitField0_ |= 0x00000010; + payload_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_RegionTransition_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_RegionTransition_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_RegionTransition_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition.class, org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition.Builder.class); + } + + public 
static com.google.protobuf.Parser<RegionTransition> PARSER = + new com.google.protobuf.AbstractParser<RegionTransition>() { + public RegionTransition parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RegionTransition(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser<RegionTransition> getParserForType() { + return PARSER; } - + private int bitField0_; // required uint32 eventTypeCode = 1; public static final int EVENTTYPECODE_FIELD_NUMBER = 1; private int eventTypeCode_; + /** + * <code>required uint32 eventTypeCode = 1;</code> + * + * <pre>
+     * Code for EventType gotten by doing o.a.h.h.EventHandler.EventType.getCode()
+     * </pre>
+ */ public boolean hasEventTypeCode() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * <code>required uint32 eventTypeCode = 1;</code> + * + * <pre>
+     * Code for EventType gotten by doing o.a.h.h.EventHandler.EventType.getCode()
+     * </pre>
+ */ public int getEventTypeCode() { return eventTypeCode_; } - + // required bytes regionName = 2; public static final int REGIONNAME_FIELD_NUMBER = 2; private com.google.protobuf.ByteString regionName_; + /** + * <code>required bytes regionName = 2;</code> + * + * <pre>
+     * Full regionname in bytes
+     * </pre>
+ */ public boolean hasRegionName() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * <code>required bytes regionName = 2;</code> + * + * <pre>
+     * Full regionname in bytes
+     * </pre>
+ */ public com.google.protobuf.ByteString getRegionName() { return regionName_; } - + // required uint64 createTime = 3; public static final int CREATETIME_FIELD_NUMBER = 3; private long createTime_; + /** + * required uint64 createTime = 3; + */ public boolean hasCreateTime() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * required uint64 createTime = 3; + */ public long getCreateTime() { return createTime_; } - + // required .ServerName serverName = 4; public static final int SERVERNAME_FIELD_NUMBER = 4; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName serverName_; + /** + * required .ServerName serverName = 4; + * + *
+     * <pre>
+     * The region server where the transition will happen or is happening
+     * </pre>
+ */ public boolean hasServerName() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * <code>required .ServerName serverName = 4;</code> + * + * <pre>
+     * The region server where the transition will happen or is happening
+     * </pre>
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName() { return serverName_; } + /** + * <code>required .ServerName serverName = 4;</code> + * + * <pre>
+     * The region server where the transition will happen or is happening
+     * </pre>
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder() { return serverName_; } - + // optional bytes payload = 5; public static final int PAYLOAD_FIELD_NUMBER = 5; private com.google.protobuf.ByteString payload_; + /** + * optional bytes payload = 5; + */ public boolean hasPayload() { return ((bitField0_ & 0x00000010) == 0x00000010); } + /** + * optional bytes payload = 5; + */ public com.google.protobuf.ByteString getPayload() { return payload_; } - + private void initFields() { eventTypeCode_ = 0; regionName_ = com.google.protobuf.ByteString.EMPTY; @@ -1487,7 +2197,7 @@ public final class ZooKeeperProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasEventTypeCode()) { memoizedIsInitialized = 0; return false; @@ -1511,7 +2221,7 @@ public final class ZooKeeperProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -1532,12 +2242,12 @@ public final class ZooKeeperProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -1563,14 +2273,14 @@ public final class ZooKeeperProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -1580,7 +2290,7 @@ public final class ZooKeeperProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition other = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition) obj; - + boolean result = true; result = result && (hasEventTypeCode() == other.hasEventTypeCode()); if (hasEventTypeCode()) { @@ -1611,9 +2321,13 @@ public final class ZooKeeperProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasEventTypeCode()) { @@ -1637,89 +2351,85 @@ public final class ZooKeeperProtos { hash = (53 * hash) + getPayload().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static 
org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code RegionTransition} + * + *
+     * <pre>
+     **
+     * What we write under unassigned up in zookeeper as a region moves through
+     * open/close, etc., regions.  Details a region in transition.
+     * </pre>
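The Builder below also shows the new streaming error contract: PARSER.parsePartialFrom attaches whatever it managed to decode via setUnfinishedMessage, and the regenerated mergeFrom's finally block folds that partial message into the builder before rethrowing. A sketch of what a caller can rely on under protobuf 2.5, with input deliberately truncated to force a failure:

import java.util.Arrays;
import com.google.protobuf.CodedInputStream;
import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition;

public final class PartialMergeSketch {
  static void mergeLossy(RegionTransition.Builder builder, byte[] bytes) {
    byte[] truncated = Arrays.copyOf(bytes, Math.max(0, bytes.length - 1));
    try {
      builder.mergeFrom(CodedInputStream.newInstance(truncated));
    } catch (java.io.IOException e) {
      // Fields decoded before the failure are already merged into 'builder';
      // under 2.5, e is an InvalidProtocolBufferException carrying the
      // partial message from getUnfinishedMessage().
    }
  }
}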
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransitionOrBuilder { @@ -1727,18 +2437,21 @@ public final class ZooKeeperProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_RegionTransition_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_RegionTransition_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_RegionTransition_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition.class, org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -1750,7 +2463,7 @@ public final class ZooKeeperProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); eventTypeCode_ = 0; @@ -1769,20 +2482,20 @@ public final class ZooKeeperProtos { bitField0_ = (bitField0_ & ~0x00000010); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_RegionTransition_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition build() { org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition result = buildPartial(); if (!result.isInitialized()) { @@ -1790,17 +2503,7 @@ public final class ZooKeeperProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition result = new org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition(this); int from_bitField0_ = bitField0_; @@ -1833,7 +2536,7 @@ public final class ZooKeeperProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition) { return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition)other); @@ -1842,7 +2545,7 @@ public final class ZooKeeperProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition other) { if (other == org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition.getDefaultInstance()) return this; if (other.hasEventTypeCode()) { @@ -1863,7 +2566,7 @@ public final class ZooKeeperProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasEventTypeCode()) { @@ -1887,94 +2590,104 @@ public final class ZooKeeperProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - eventTypeCode_ = input.readUInt32(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - regionName_ = input.readBytes(); - break; - } - case 24: { - bitField0_ |= 0x00000004; - createTime_ = input.readUInt64(); - break; - } - case 34: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(); - if (hasServerName()) { - subBuilder.mergeFrom(getServerName()); - } - input.readMessage(subBuilder, extensionRegistry); - setServerName(subBuilder.buildPartial()); - break; - } - case 42: { - bitField0_ |= 0x00000010; - payload_ = input.readBytes(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required uint32 eventTypeCode = 1; private int eventTypeCode_ ; + /** + * required uint32 eventTypeCode = 1; + * + *
+       * Code for EventType gotten by doing o.a.h.h.EventHandler.EventType.getCode()
+       * 
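Note on the preceding hunk: every message regenerated by this patch replaces the hand-rolled mergeFrom(CodedInputStream) loop with a delegation to PARSER.parsePartialFrom, re-merging any partially read message recovered from getUnfinishedMessage() before the exception is rethrown. A minimal sketch of a round trip through the RegionTransition builder API shown here; field values are made up, and parseFrom is the standard generated static whose hunk falls outside this excerpt.

    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName;
    import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition;

    public class RegionTransitionRoundTrip {
      public static void main(String[] args) throws Exception {
        // Illustrative values only; setter names follow the usual codegen pattern.
        ServerName sn = ServerName.newBuilder()
            .setHostName("rs1.example.org")
            .setPort(60020)
            .setStartCode(1L)
            .build();
        RegionTransition rt = RegionTransition.newBuilder()
            .setEventTypeCode(1)  // o.a.h.h.EventHandler.EventType code
            .setRegionName(ByteString.copyFromUtf8("TestTable,,1.deadbeef."))
            .setCreateTime(System.currentTimeMillis())
            .setServerName(sn)
            .build();
        byte[] bytes = rt.toByteArray();
        // Post-patch this is backed by RegionTransition.PARSER instead of
        // newBuilder().mergeFrom(bytes).buildParsed().
        RegionTransition reread = RegionTransition.parseFrom(bytes);
        assert reread.getEventTypeCode() == rt.getEventTypeCode();
      }
    }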
+ */ public boolean hasEventTypeCode() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required uint32 eventTypeCode = 1; + * + *
+       * Code for EventType gotten by doing o.a.h.h.EventHandler.EventType.getCode()
+       * 
+ */ public int getEventTypeCode() { return eventTypeCode_; } + /** + * required uint32 eventTypeCode = 1; + * + *
+       * Code for EventType gotten by doing o.a.h.h.EventHandler.EventType.getCode()
+       * 
+ */ public Builder setEventTypeCode(int value) { bitField0_ |= 0x00000001; eventTypeCode_ = value; onChanged(); return this; } + /** + * required uint32 eventTypeCode = 1; + * + *
+       * Code for EventType gotten by doing o.a.h.h.EventHandler.EventType.getCode()
+       * 
+ */ public Builder clearEventTypeCode() { bitField0_ = (bitField0_ & ~0x00000001); eventTypeCode_ = 0; onChanged(); return this; } - + // required bytes regionName = 2; private com.google.protobuf.ByteString regionName_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes regionName = 2; + * + *
+       * Full regionname in bytes
+       * 
+ */ public boolean hasRegionName() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required bytes regionName = 2; + * + *
+       * Full regionname in bytes
+       * 
+ */ public com.google.protobuf.ByteString getRegionName() { return regionName_; } + /** + * required bytes regionName = 2; + * + *
+       * Full regionname in bytes
+       * 
+ */ public Builder setRegionName(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -1984,41 +2697,74 @@ public final class ZooKeeperProtos { onChanged(); return this; } + /** + * required bytes regionName = 2; + * + *
+       * Full regionname in bytes
+       * 
+ */ public Builder clearRegionName() { bitField0_ = (bitField0_ & ~0x00000002); regionName_ = getDefaultInstance().getRegionName(); onChanged(); return this; } - + // required uint64 createTime = 3; private long createTime_ ; + /** + * required uint64 createTime = 3; + */ public boolean hasCreateTime() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * required uint64 createTime = 3; + */ public long getCreateTime() { return createTime_; } + /** + * required uint64 createTime = 3; + */ public Builder setCreateTime(long value) { bitField0_ |= 0x00000004; createTime_ = value; onChanged(); return this; } + /** + * required uint64 createTime = 3; + */ public Builder clearCreateTime() { bitField0_ = (bitField0_ & ~0x00000004); createTime_ = 0L; onChanged(); return this; } - + // required .ServerName serverName = 4; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverNameBuilder_; + /** + * required .ServerName serverName = 4; + * + *
+       * The region server where the transition will happen or is happening
+       * 
+ */ public boolean hasServerName() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * required .ServerName serverName = 4; + * + *
+       * The region server where the transition will happen or is happening
+       * 
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName() { if (serverNameBuilder_ == null) { return serverName_; @@ -2026,6 +2772,13 @@ public final class ZooKeeperProtos { return serverNameBuilder_.getMessage(); } } + /** + * required .ServerName serverName = 4; + * + *
+       * The region server where the transition will happen or is happening
+       * 
+ */ public Builder setServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { if (serverNameBuilder_ == null) { if (value == null) { @@ -2039,6 +2792,13 @@ public final class ZooKeeperProtos { bitField0_ |= 0x00000008; return this; } + /** + * required .ServerName serverName = 4; + * + *
+       * The region server where the transition will happen or is happening
+       * 
+ */ public Builder setServerName( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { if (serverNameBuilder_ == null) { @@ -2050,6 +2810,13 @@ public final class ZooKeeperProtos { bitField0_ |= 0x00000008; return this; } + /** + * required .ServerName serverName = 4; + * + *
+       * The region server where the transition will happen or is happening
+       * 
+ */ public Builder mergeServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { if (serverNameBuilder_ == null) { if (((bitField0_ & 0x00000008) == 0x00000008) && @@ -2066,6 +2833,13 @@ public final class ZooKeeperProtos { bitField0_ |= 0x00000008; return this; } + /** + * required .ServerName serverName = 4; + * + *
+       * The region server where the transition will happen or is happening
+       * 
+ */ public Builder clearServerName() { if (serverNameBuilder_ == null) { serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); @@ -2076,11 +2850,25 @@ public final class ZooKeeperProtos { bitField0_ = (bitField0_ & ~0x00000008); return this; } + /** + * required .ServerName serverName = 4; + * + *
+       * The region server where the transition will happen or is happening
+       * 
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getServerNameBuilder() { bitField0_ |= 0x00000008; onChanged(); return getServerNameFieldBuilder().getBuilder(); } + /** + * required .ServerName serverName = 4; + * + *
+       * The region server where the transition will happen or is happening
+       * 
+ */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder() { if (serverNameBuilder_ != null) { return serverNameBuilder_.getMessageOrBuilder(); @@ -2088,6 +2876,13 @@ public final class ZooKeeperProtos { return serverName_; } } + /** + * required .ServerName serverName = 4; + * + *
+       * The region server where the transition will happen or is happening
+       * 
+ */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getServerNameFieldBuilder() { @@ -2101,15 +2896,24 @@ public final class ZooKeeperProtos { } return serverNameBuilder_; } - + // optional bytes payload = 5; private com.google.protobuf.ByteString payload_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes payload = 5; + */ public boolean hasPayload() { return ((bitField0_ & 0x00000010) == 0x00000010); } + /** + * optional bytes payload = 5; + */ public com.google.protobuf.ByteString getPayload() { return payload_; } + /** + * optional bytes payload = 5; + */ public Builder setPayload(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -2119,82 +2923,225 @@ public final class ZooKeeperProtos { onChanged(); return this; } + /** + * optional bytes payload = 5; + */ public Builder clearPayload() { bitField0_ = (bitField0_ & ~0x00000010); payload_ = getDefaultInstance().getPayload(); onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:RegionTransition) } - + static { defaultInstance = new RegionTransition(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:RegionTransition) } - + public interface SplitLogTaskOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .SplitLogTask.State state = 1; + /** + * required .SplitLogTask.State state = 1; + */ boolean hasState(); + /** + * required .SplitLogTask.State state = 1; + */ org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State getState(); - + // required .ServerName serverName = 2; + /** + * required .ServerName serverName = 2; + */ boolean hasServerName(); + /** + * required .ServerName serverName = 2; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName(); + /** + * required .ServerName serverName = 2; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder(); } + /** + * Protobuf type {@code SplitLogTask} + * + *
+   **
+   * WAL SplitLog directory znodes have this for content.  Used when doing distributed
+   * WAL splitting.  Holds current state and name of the server that originated the split.
+   * 
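The comment above describes the znode contract for distributed WAL splitting. A sketch of the payload round trip for the class defined just below, using only calls visible in this hunk; the server values are placeholders.

    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName;
    import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask;

    public class SplitLogTaskRoundTrip {
      public static void main(String[] args) throws Exception {
        SplitLogTask task = SplitLogTask.newBuilder()
            .setState(SplitLogTask.State.OWNED)      // a worker has claimed the task
            .setServerName(ServerName.newBuilder()
                .setHostName("worker1.example.org")  // placeholder worker
                .setPort(60020)
                .setStartCode(42L)
                .build())
            .build();  // fails if the required state/serverName are unset
        byte[] znodeData = task.toByteArray();
        // parseFrom now delegates to SplitLogTask.PARSER:
        SplitLogTask read = SplitLogTask.parseFrom(znodeData);
        System.out.println(read.getState());  // OWNED
      }
    }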
+ */ public static final class SplitLogTask extends com.google.protobuf.GeneratedMessage implements SplitLogTaskOrBuilder { // Use SplitLogTask.newBuilder() to construct. - private SplitLogTask(Builder builder) { + private SplitLogTask(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private SplitLogTask(boolean noInit) {} - + private SplitLogTask(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final SplitLogTask defaultInstance; public static SplitLogTask getDefaultInstance() { return defaultInstance; } - + public SplitLogTask getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private SplitLogTask( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State value = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(1, rawValue); + } else { + bitField0_ |= 0x00000001; + state_ = value; + } + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = null; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + subBuilder = serverName_.toBuilder(); + } + serverName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(serverName_); + serverName_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000002; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_SplitLogTask_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_SplitLogTask_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_SplitLogTask_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.class, org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new 
com.google.protobuf.AbstractParser() { + public SplitLogTask parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new SplitLogTask(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + + /** + * Protobuf enum {@code SplitLogTask.State} + */ public enum State implements com.google.protobuf.ProtocolMessageEnum { + /** + * UNASSIGNED = 0; + */ UNASSIGNED(0, 0), + /** + * OWNED = 1; + */ OWNED(1, 1), + /** + * RESIGNED = 2; + */ RESIGNED(2, 2), + /** + * DONE = 3; + */ DONE(3, 3), + /** + * ERR = 4; + */ ERR(4, 4), ; - + + /** + * UNASSIGNED = 0; + */ public static final int UNASSIGNED_VALUE = 0; + /** + * OWNED = 1; + */ public static final int OWNED_VALUE = 1; + /** + * RESIGNED = 2; + */ public static final int RESIGNED_VALUE = 2; + /** + * DONE = 3; + */ public static final int DONE_VALUE = 3; + /** + * ERR = 4; + */ public static final int ERR_VALUE = 4; - - + + public final int getNumber() { return value; } - + public static State valueOf(int value) { switch (value) { case 0: return UNASSIGNED; @@ -2205,7 +3152,7 @@ public final class ZooKeeperProtos { default: return null; } } - + public static com.google.protobuf.Internal.EnumLiteMap internalGetValueMap() { return internalValueMap; @@ -2217,7 +3164,7 @@ public final class ZooKeeperProtos { return State.valueOf(number); } }; - + public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(index); @@ -2230,11 +3177,9 @@ public final class ZooKeeperProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.getDescriptor().getEnumTypes().get(0); } - - private static final State[] VALUES = { - UNASSIGNED, OWNED, RESIGNED, DONE, ERR, - }; - + + private static final State[] VALUES = values(); + public static State valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { @@ -2243,42 +3188,57 @@ public final class ZooKeeperProtos { } return VALUES[desc.getIndex()]; } - + private final int index; private final int value; - + private State(int index, int value) { this.index = index; this.value = value; } - + // @@protoc_insertion_point(enum_scope:SplitLogTask.State) } - + private int bitField0_; // required .SplitLogTask.State state = 1; public static final int STATE_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State state_; + /** + * required .SplitLogTask.State state = 1; + */ public boolean hasState() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .SplitLogTask.State state = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State getState() { return state_; } - + // required .ServerName serverName = 2; public static final int SERVERNAME_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName serverName_; + /** + * required .ServerName serverName = 2; + */ public boolean hasServerName() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required .ServerName serverName = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName() { return serverName_; } + /** + * required .ServerName serverName = 2; + */ public 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder() { return serverName_; } - + private void initFields() { state_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State.UNASSIGNED; serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); @@ -2287,7 +3247,7 @@ public final class ZooKeeperProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasState()) { memoizedIsInitialized = 0; return false; @@ -2303,7 +3263,7 @@ public final class ZooKeeperProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -2315,12 +3275,12 @@ public final class ZooKeeperProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -2334,14 +3294,14 @@ public final class ZooKeeperProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -2351,7 +3311,7 @@ public final class ZooKeeperProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask other = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask) obj; - + boolean result = true; result = result && (hasState() == other.hasState()); if (hasState()) { @@ -2367,9 +3327,13 @@ public final class ZooKeeperProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasState()) { @@ -2381,89 +3345,85 @@ public final class ZooKeeperProtos { hash = (53 * hash) + getServerName().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask parseFrom( byte[] data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code SplitLogTask} + * + *
+     **
+     * WAL SplitLog directory znodes have this for content.  Used when doing distributed
+     * WAL splitting.  Holds current state and name of the server that originated the split.
+     * 
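Like every regenerated builder in this file, the Builder defined just below implements mergeFrom(CodedInputStream, ExtensionRegistryLite) by delegating to PARSER.parsePartialFrom and, when the InvalidProtocolBufferException carries a partially decoded message, merging that partial result in the finally block before rethrowing. A hypothetical caller can therefore observe whatever fields were decoded before a failure:

    import com.google.protobuf.CodedInputStream;
    import com.google.protobuf.ExtensionRegistryLite;
    import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask;

    public class PartialMergeDemo {
      // maybeTruncated is a contrived, possibly incomplete serialization.
      public static void tryMerge(byte[] maybeTruncated) {
        SplitLogTask.Builder b = SplitLogTask.newBuilder();
        try {
          b.mergeFrom(CodedInputStream.newInstance(maybeTruncated),
              ExtensionRegistryLite.getEmptyRegistry());
        } catch (java.io.IOException e) {
          // Fields read before the failure were already merged into b.
          System.out.println("state decoded before failure? " + b.hasState());
        }
      }
    }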
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTaskOrBuilder { @@ -2471,18 +3431,21 @@ public final class ZooKeeperProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_SplitLogTask_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_SplitLogTask_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_SplitLogTask_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.class, org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -2494,7 +3457,7 @@ public final class ZooKeeperProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); state_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State.UNASSIGNED; @@ -2507,20 +3470,20 @@ public final class ZooKeeperProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_SplitLogTask_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask build() { org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask result = buildPartial(); if (!result.isInitialized()) { @@ -2528,17 +3491,7 @@ public final class ZooKeeperProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask result = new org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask(this); int from_bitField0_ = bitField0_; @@ -2559,7 +3512,7 @@ public final class ZooKeeperProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask) { return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask)other); @@ -2568,7 +3521,7 @@ public final class ZooKeeperProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask other) { if (other == org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.getDefaultInstance()) return this; if (other.hasState()) { @@ -2580,7 +3533,7 @@ public final class ZooKeeperProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasState()) { @@ -2596,64 +3549,43 @@ public final class ZooKeeperProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State value = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(1, rawValue); - } else { - bitField0_ |= 0x00000001; - state_ = value; - } - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(); - if (hasServerName()) { - subBuilder.mergeFrom(getServerName()); - } - input.readMessage(subBuilder, extensionRegistry); - setServerName(subBuilder.buildPartial()); - break; - } + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .SplitLogTask.State state = 1; private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State state_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State.UNASSIGNED; + /** + * required .SplitLogTask.State state = 1; + */ public boolean hasState() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .SplitLogTask.State state = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State getState() { return state_; } + /** + * required .SplitLogTask.State state = 1; + */ public Builder setState(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State value) { if (value == null) { throw new NullPointerException(); @@ -2663,20 +3595,29 @@ public final class ZooKeeperProtos { onChanged(); return this; } + /** + * required .SplitLogTask.State state = 1; + */ public Builder clearState() { bitField0_ = (bitField0_ & ~0x00000001); state_ = 
org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State.UNASSIGNED; onChanged(); return this; } - + // required .ServerName serverName = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverNameBuilder_; + /** + * required .ServerName serverName = 2; + */ public boolean hasServerName() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required .ServerName serverName = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName() { if (serverNameBuilder_ == null) { return serverName_; @@ -2684,6 +3625,9 @@ public final class ZooKeeperProtos { return serverNameBuilder_.getMessage(); } } + /** + * required .ServerName serverName = 2; + */ public Builder setServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { if (serverNameBuilder_ == null) { if (value == null) { @@ -2697,6 +3641,9 @@ public final class ZooKeeperProtos { bitField0_ |= 0x00000002; return this; } + /** + * required .ServerName serverName = 2; + */ public Builder setServerName( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { if (serverNameBuilder_ == null) { @@ -2708,6 +3655,9 @@ public final class ZooKeeperProtos { bitField0_ |= 0x00000002; return this; } + /** + * required .ServerName serverName = 2; + */ public Builder mergeServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { if (serverNameBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && @@ -2724,6 +3674,9 @@ public final class ZooKeeperProtos { bitField0_ |= 0x00000002; return this; } + /** + * required .ServerName serverName = 2; + */ public Builder clearServerName() { if (serverNameBuilder_ == null) { serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); @@ -2734,11 +3687,17 @@ public final class ZooKeeperProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } + /** + * required .ServerName serverName = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getServerNameBuilder() { bitField0_ |= 0x00000002; onChanged(); return getServerNameFieldBuilder().getBuilder(); } + /** + * required .ServerName serverName = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder() { if (serverNameBuilder_ != null) { return serverNameBuilder_.getMessageOrBuilder(); @@ -2746,6 +3705,9 @@ public final class ZooKeeperProtos { return serverName_; } } + /** + * required .ServerName serverName = 2; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getServerNameFieldBuilder() { @@ -2759,69 +3721,196 @@ public final class ZooKeeperProtos { } return serverNameBuilder_; } - + // @@protoc_insertion_point(builder_scope:SplitLogTask) } - + static { defaultInstance = new SplitLogTask(true); defaultInstance.initFields(); } - + // 
@@protoc_insertion_point(class_scope:SplitLogTask) } - + public interface TableOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .Table.State state = 1 [default = ENABLED]; + /** + * required .Table.State state = 1 [default = ENABLED]; + * + *
+     * This is the table's state.  If no znode for a table,
+     * its state is presumed enabled.  See o.a.h.h.zookeeper.ZKTable class
+     * for more.
+     * 
+ */ boolean hasState(); + /** + * required .Table.State state = 1 [default = ENABLED]; + * + *
+     * This is the table's state.  If no znode for a table,
+     * its state is presumed enabled.  See o.a.h.h.zookeeper.ZKTable class
+     * for more.
+     * 
+ */ org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State getState(); } + /** + * Protobuf type {@code Table} + * + *
+   **
+   * The znode that holds state of table.
+   * 
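Per the comment above, a reader treats a missing table znode as ENABLED, and the state field itself carries [default = ENABLED]. A sketch of that convention; the znode fetch is a stand-in, only the protobuf calls come from this file, and the PB magic prefix real HBase prepends to znode payloads is omitted here.

    import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table;

    public final class TableStateReader {
      /** @param data the znode content, or null if the znode does not exist */
      public static Table.State stateOf(byte[] data) throws Exception {
        if (data == null) {
          return Table.State.ENABLED;            // no znode: presumed enabled
        }
        return Table.parseFrom(data).getState(); // required, default = ENABLED
      }
    }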
+ */ public static final class Table extends com.google.protobuf.GeneratedMessage implements TableOrBuilder { // Use Table.newBuilder() to construct. - private Table(Builder builder) { + private Table(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private Table(boolean noInit) {} - + private Table(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final Table defaultInstance; public static Table getDefaultInstance() { return defaultInstance; } - + public Table getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private Table( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State value = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(1, rawValue); + } else { + bitField0_ |= 0x00000001; + state_ = value; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_Table_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_Table_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_Table_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.class, org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser
<Table>() {
+       public Table parsePartialFrom(
+           com.google.protobuf.CodedInputStream input,
+           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+           throws com.google.protobuf.InvalidProtocolBufferException {
+         return new Table(input, extensionRegistry);
+       }
+     };
+
+   @java.lang.Override
+   public com.google.protobuf.Parser<Table>
getParserForType() { + return PARSER; } - + + /** + * Protobuf enum {@code Table.State} + * + *
+     * Table's current state
+     * 
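One behavioral detail of the new stream-parsing constructors in this file: an enum ordinal the reader does not recognize no longer derails the parse; the raw varint is kept in the message's UnknownFieldSet via mergeVarintField. A contrived demonstration against the State enum below, hand-encoding field 1 twice (writeEnum and the rest are stock protobuf calls; the duplicate write just lets the required field stay initialized):

    import com.google.protobuf.CodedOutputStream;
    import java.io.ByteArrayOutputStream;
    import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table;

    public class UnknownEnumDemo {
      public static void main(String[] args) throws Exception {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        CodedOutputStream out = CodedOutputStream.newInstance(bos);
        out.writeEnum(1, 3);   // state = ENABLING, known to this reader
        out.writeEnum(1, 99);  // ordinal from some hypothetical future State
        out.flush();
        Table t = Table.parseFrom(bos.toByteArray());
        System.out.println(t.getState());                     // ENABLING
        System.out.println(t.getUnknownFields().hasField(1)); // true: 99 kept
      }
    }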
+ */ public enum State implements com.google.protobuf.ProtocolMessageEnum { + /** + * ENABLED = 0; + */ ENABLED(0, 0), + /** + * DISABLED = 1; + */ DISABLED(1, 1), + /** + * DISABLING = 2; + */ DISABLING(2, 2), + /** + * ENABLING = 3; + */ ENABLING(3, 3), ; - + + /** + * ENABLED = 0; + */ public static final int ENABLED_VALUE = 0; + /** + * DISABLED = 1; + */ public static final int DISABLED_VALUE = 1; + /** + * DISABLING = 2; + */ public static final int DISABLING_VALUE = 2; + /** + * ENABLING = 3; + */ public static final int ENABLING_VALUE = 3; - - + + public final int getNumber() { return value; } - + public static State valueOf(int value) { switch (value) { case 0: return ENABLED; @@ -2831,7 +3920,7 @@ public final class ZooKeeperProtos { default: return null; } } - + public static com.google.protobuf.Internal.EnumLiteMap internalGetValueMap() { return internalValueMap; @@ -2843,7 +3932,7 @@ public final class ZooKeeperProtos { return State.valueOf(number); } }; - + public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(index); @@ -2856,11 +3945,9 @@ public final class ZooKeeperProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.getDescriptor().getEnumTypes().get(0); } - - private static final State[] VALUES = { - ENABLED, DISABLED, DISABLING, ENABLING, - }; - + + private static final State[] VALUES = values(); + public static State valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { @@ -2869,29 +3956,47 @@ public final class ZooKeeperProtos { } return VALUES[desc.getIndex()]; } - + private final int index; private final int value; - + private State(int index, int value) { this.index = index; this.value = value; } - + // @@protoc_insertion_point(enum_scope:Table.State) } - + private int bitField0_; // required .Table.State state = 1 [default = ENABLED]; public static final int STATE_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State state_; + /** + * required .Table.State state = 1 [default = ENABLED]; + * + *
+     * This is the table's state.  If no znode for a table,
+     * its state is presumed enabled.  See o.a.h.h.zookeeper.ZKTable class
+     * for more.
+     * 
+ */ public boolean hasState() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .Table.State state = 1 [default = ENABLED]; + * + *
+     * This is the table's state.  If no znode for a table,
+     * its state is presumed enabled.  See o.a.h.h.zookeeper.ZKTable class
+     * for more.
+     * 
+ */ public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State getState() { return state_; } - + private void initFields() { state_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State.ENABLED; } @@ -2899,7 +4004,7 @@ public final class ZooKeeperProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasState()) { memoizedIsInitialized = 0; return false; @@ -2907,7 +4012,7 @@ public final class ZooKeeperProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -2916,12 +4021,12 @@ public final class ZooKeeperProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -2931,14 +4036,14 @@ public final class ZooKeeperProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -2948,7 +4053,7 @@ public final class ZooKeeperProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table other = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table) obj; - + boolean result = true; result = result && (hasState() == other.hasState()); if (hasState()) { @@ -2959,9 +4064,13 @@ public final class ZooKeeperProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasState()) { @@ -2969,89 +4078,84 @@ public final class ZooKeeperProtos { hash = (53 * hash) + hashEnum(getState()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); 
+ return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code Table} + * + *
+     **
+     * The znode that holds state of table.
+     * 
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableOrBuilder { @@ -3059,18 +4163,21 @@ public final class ZooKeeperProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_Table_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_Table_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_Table_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.class, org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -3081,27 +4188,27 @@ public final class ZooKeeperProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); state_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State.ENABLED; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_Table_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table build() { org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table result = buildPartial(); if (!result.isInitialized()) { @@ -3109,17 +4216,7 @@ public final class ZooKeeperProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table result = new org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table(this); int from_bitField0_ = bitField0_; @@ -3132,7 +4229,7 @@ public final class ZooKeeperProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table)other); @@ -3141,7 +4238,7 @@ public final class ZooKeeperProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table other) { if (other == 
org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.getDefaultInstance()) return this; if (other.hasState()) { @@ -3150,7 +4247,7 @@ public final class ZooKeeperProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasState()) { @@ -3158,55 +4255,61 @@ public final class ZooKeeperProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State value = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(1, rawValue); - } else { - bitField0_ |= 0x00000001; - state_ = value; - } - break; - } + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .Table.State state = 1 [default = ENABLED]; private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State state_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State.ENABLED; + /** + * required .Table.State state = 1 [default = ENABLED]; + * + *
+       * This is the table's state.  If no znode for a table,
+       * its state is presumed enabled.  See o.a.h.h.zookeeper.ZKTable class
+       * for more.
+       * 
+ */ public boolean hasState() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .Table.State state = 1 [default = ENABLED]; + * + *
+       * This is the table's state.  If no znode for a table,
+       * its state is presumed enabled.  See o.a.h.h.zookeeper.ZKTable class
+       * for more.
+       * 
+ */ public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State getState() { return state_; } + /** + * required .Table.State state = 1 [default = ENABLED]; + * + *
+       * This is the table's state.  If no znode for a table,
+       * its state is presumed enabled.  See o.a.h.h.zookeeper.ZKTable class
+       * for more.
+       * 
+ */ public Builder setState(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State value) { if (value == null) { throw new NullPointerException(); @@ -3216,92 +4319,225 @@ public final class ZooKeeperProtos { onChanged(); return this; } + /** + * required .Table.State state = 1 [default = ENABLED]; + * + *
+       * This is the table's state.  If no znode for a table,
+       * its state is presumed enabled.  See o.a.h.h.zookeeper.ZKTable class
+       * for more.
+       * 
+ */ public Builder clearState() { bitField0_ = (bitField0_ & ~0x00000001); state_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State.ENABLED; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:Table) } - + static { defaultInstance = new Table(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:Table) } - + public interface ReplicationPeerOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required string clusterkey = 1; + /** + * required string clusterkey = 1; + * + *
+     * clusterKey is the concatenation of the slave cluster's
+     * hbase.zookeeper.quorum:hbase.zookeeper.property.clientPort:zookeeper.znode.parent
+     * 
+ */ boolean hasClusterkey(); - String getClusterkey(); + /** + * required string clusterkey = 1; + * + *
+     * clusterKey is the concatenation of the slave cluster's
+     * hbase.zookeeper.quorum:hbase.zookeeper.property.clientPort:zookeeper.znode.parent
+     * 
+ */ + java.lang.String getClusterkey(); + /** + * required string clusterkey = 1; + * + *
+     * clusterKey is the concatenation of the slave cluster's
+     * hbase.zookeeper.quorum:hbase.zookeeper.property.clientPort:zookeeper.znode.parent
+     * 
+ */ + com.google.protobuf.ByteString + getClusterkeyBytes(); } + /** + * Protobuf type {@code ReplicationPeer} + * + *
+   **
+   * Used by replication. Holds a replication peer key.
+   * 
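The cluster key format documented above concatenates three slave-side settings with colons. A sketch of assembling one; all values are placeholders, and setClusterkey is the standard generated setter, whose Builder hunk lies beyond this excerpt.

    import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer;

    public class ReplicationPeerKey {
      public static void main(String[] args) {
        String quorum = "zk1.example.org,zk2.example.org,zk3.example.org";
        String clientPort = "2181";
        String znodeParent = "/hbase";
        ReplicationPeer peer = ReplicationPeer.newBuilder()
            .setClusterkey(quorum + ":" + clientPort + ":" + znodeParent)
            .build();
        System.out.println(peer.getClusterkey());
      }
    }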
+ */ public static final class ReplicationPeer extends com.google.protobuf.GeneratedMessage implements ReplicationPeerOrBuilder { // Use ReplicationPeer.newBuilder() to construct. - private ReplicationPeer(Builder builder) { + private ReplicationPeer(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private ReplicationPeer(boolean noInit) {} - + private ReplicationPeer(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final ReplicationPeer defaultInstance; public static ReplicationPeer getDefaultInstance() { return defaultInstance; } - + public ReplicationPeer getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ReplicationPeer( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + clusterkey_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationPeer_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationPeer_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationPeer_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer.class, org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public ReplicationPeer parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ReplicationPeer(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required string clusterkey = 1; public static final int CLUSTERKEY_FIELD_NUMBER = 1; private java.lang.Object clusterkey_; + /** + * required string clusterkey = 1; + * + *
+     * clusterKey is the concatenation of the slave cluster's
+     * hbase.zookeeper.quorum:hbase.zookeeper.property.clientPort:zookeeper.znode.parent
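+     * For example (illustrative value only):
+     *   "zk1.example.com,zk2.example.com,zk3.example.com:2181:/hbase"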
+     * 
+     */
    public boolean hasClusterkey() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
-    public String getClusterkey() {
+    /**
+     * required string clusterkey = 1;
+     *
+     *
+     * clusterKey is the concatenation of the slave cluster's
+     * hbase.zookeeper.quorum:hbase.zookeeper.property.clientPort:zookeeper.znode.parent
+     * 
+     */
+    public java.lang.String getClusterkey() {
      java.lang.Object ref = clusterkey_;
-     if (ref instanceof String) {
-       return (String) ref;
+     if (ref instanceof java.lang.String) {
+       return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
-       String s = bs.toStringUtf8();
-       if (com.google.protobuf.Internal.isValidUtf8(bs)) {
+       java.lang.String s = bs.toStringUtf8();
+       if (bs.isValidUtf8()) {
          clusterkey_ = s;
        }
        return s;
      }
    }
-    private com.google.protobuf.ByteString getClusterkeyBytes() {
+    /**
+     * required string clusterkey = 1;
+     *
+     *
+     * clusterKey is the concatenation of the slave cluster's
+     * hbase.zookeeper.quorum:hbase.zookeeper.property.clientPort:zookeeper.znode.parent
+     * 
+ */ + public com.google.protobuf.ByteString + getClusterkeyBytes() { java.lang.Object ref = clusterkey_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); clusterkey_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + private void initFields() { clusterkey_ = ""; } @@ -3309,7 +4545,7 @@ public final class ZooKeeperProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasClusterkey()) { memoizedIsInitialized = 0; return false; @@ -3317,7 +4553,7 @@ public final class ZooKeeperProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -3326,12 +4562,12 @@ public final class ZooKeeperProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -3341,14 +4577,14 @@ public final class ZooKeeperProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -3358,7 +4594,7 @@ public final class ZooKeeperProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer other = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer) obj; - + boolean result = true; result = result && (hasClusterkey() == other.hasClusterkey()); if (hasClusterkey()) { @@ -3369,9 +4605,13 @@ public final class ZooKeeperProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasClusterkey()) { @@ -3379,89 +4619,84 @@ public final class ZooKeeperProtos { hash = (53 * hash) + getClusterkey().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + 
return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code ReplicationPeer} + * + *
+     **
+     * Used by replication. Holds a replication peer key.
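+     *
+     * Illustrative builder usage; the cluster key below is an example value,
+     * not taken from the patch:
+     *
+     *   ReplicationPeer peer = ReplicationPeer.newBuilder()
+     *       .setClusterkey("zk1.example.com:2181:/hbase")
+     *       .build();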
+     * 
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeerOrBuilder { @@ -3469,18 +4704,21 @@ public final class ZooKeeperProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationPeer_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationPeer_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationPeer_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer.class, org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -3491,27 +4729,27 @@ public final class ZooKeeperProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); clusterkey_ = ""; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationPeer_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer build() { org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer result = buildPartial(); if (!result.isInitialized()) { @@ -3519,17 +4757,7 @@ public final class ZooKeeperProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer result = new org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer(this); int from_bitField0_ = bitField0_; @@ -3542,7 +4770,7 @@ public final class ZooKeeperProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer)other); @@ -3551,16 +4779,18 @@ public final class ZooKeeperProtos { 
return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer other) { if (other == org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer.getDefaultInstance()) return this; if (other.hasClusterkey()) { - setClusterkey(other.getClusterkey()); + bitField0_ |= 0x00000001; + clusterkey_ = other.clusterkey_; + onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasClusterkey()) { @@ -3568,57 +4798,89 @@ public final class ZooKeeperProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - clusterkey_ = input.readBytes(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required string clusterkey = 1; private java.lang.Object clusterkey_ = ""; + /** + * required string clusterkey = 1; + * + *
+       * clusterKey is the concatenation of the slave cluster's
+       * hbase.zookeeper.quorum:hbase.zookeeper.property.clientPort:zookeeper.znode.parent
+       * 
+ */ public boolean hasClusterkey() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getClusterkey() { + /** + * required string clusterkey = 1; + * + *
+       * clusterKey is the concatenation of the slave cluster's
+       * hbase.zookeeper.quorum:hbase.zookeeper.property.clientPort:zookeeper.znode.parent
+       * 
+ */ + public java.lang.String getClusterkey() { java.lang.Object ref = clusterkey_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); clusterkey_ = s; return s; } else { - return (String) ref; - } - } - public Builder setClusterkey(String value) { + return (java.lang.String) ref; + } + } + /** + * required string clusterkey = 1; + * + *
+       * clusterKey is the concatenation of the slave cluster's
+       * hbase.zookeeper.quorum:hbase.zookeeper.property.clientPort:zookeeper.znode.parent
+       * 
+ */ + public com.google.protobuf.ByteString + getClusterkeyBytes() { + java.lang.Object ref = clusterkey_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + clusterkey_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * required string clusterkey = 1; + * + *
+       * clusterKey is the concatenation of the slave cluster's
+       * hbase.zookeeper.quorum:hbase.zookeeper.property.clientPort:zookeeper.znode.parent
+       * 
+ */ + public Builder setClusterkey( + java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ -3627,76 +4889,196 @@ public final class ZooKeeperProtos { onChanged(); return this; } + /** + * required string clusterkey = 1; + * + *
+       * clusterKey is the concatenation of the slave cluster's
+       * hbase.zookeeper.quorum:hbase.zookeeper.property.clientPort:zookeeper.znode.parent
+       * 
+ */ public Builder clearClusterkey() { bitField0_ = (bitField0_ & ~0x00000001); clusterkey_ = getDefaultInstance().getClusterkey(); onChanged(); return this; } - void setClusterkey(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; + /** + * required string clusterkey = 1; + * + *
+       * clusterKey is the concatenation of the slave cluster's
+       * hbase.zookeeper.quorum:hbase.zookeeper.property.clientPort:zookeeper.znode.parent
+       * 
+ */ + public Builder setClusterkeyBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; clusterkey_ = value; onChanged(); + return this; } - + // @@protoc_insertion_point(builder_scope:ReplicationPeer) } - + static { defaultInstance = new ReplicationPeer(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:ReplicationPeer) } - + public interface ReplicationStateOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required .ReplicationState.State state = 1; + /** + * required .ReplicationState.State state = 1; + */ boolean hasState(); + /** + * required .ReplicationState.State state = 1; + */ org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.State getState(); } + /** + * Protobuf type {@code ReplicationState} + * + *
+   **
+   * Used by replication. Holds whether enabled or disabled
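+   *
+   * Illustrative sketch of setting the flag via the generated builder:
+   *
+   *   ReplicationState state = ReplicationState.newBuilder()
+   *       .setState(ReplicationState.State.ENABLED)
+   *       .build();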
+   * 
+ */ public static final class ReplicationState extends com.google.protobuf.GeneratedMessage implements ReplicationStateOrBuilder { // Use ReplicationState.newBuilder() to construct. - private ReplicationState(Builder builder) { + private ReplicationState(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private ReplicationState(boolean noInit) {} - + private ReplicationState(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final ReplicationState defaultInstance; public static ReplicationState getDefaultInstance() { return defaultInstance; } - + public ReplicationState getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ReplicationState( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.State value = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.State.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(1, rawValue); + } else { + bitField0_ |= 0x00000001; + state_ = value; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationState_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationState_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationState_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.class, org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public ReplicationState parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ReplicationState(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return 
PARSER; } - + + /** + * Protobuf enum {@code ReplicationState.State} + */ public enum State implements com.google.protobuf.ProtocolMessageEnum { + /** + * ENABLED = 0; + */ ENABLED(0, 0), + /** + * DISABLED = 1; + */ DISABLED(1, 1), ; - + + /** + * ENABLED = 0; + */ public static final int ENABLED_VALUE = 0; + /** + * DISABLED = 1; + */ public static final int DISABLED_VALUE = 1; - - + + public final int getNumber() { return value; } - + public static State valueOf(int value) { switch (value) { case 0: return ENABLED; @@ -3704,7 +5086,7 @@ public final class ZooKeeperProtos { default: return null; } } - + public static com.google.protobuf.Internal.EnumLiteMap internalGetValueMap() { return internalValueMap; @@ -3716,7 +5098,7 @@ public final class ZooKeeperProtos { return State.valueOf(number); } }; - + public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(index); @@ -3729,11 +5111,9 @@ public final class ZooKeeperProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.getDescriptor().getEnumTypes().get(0); } - - private static final State[] VALUES = { - ENABLED, DISABLED, - }; - + + private static final State[] VALUES = values(); + public static State valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { @@ -3742,29 +5122,35 @@ public final class ZooKeeperProtos { } return VALUES[desc.getIndex()]; } - + private final int index; private final int value; - + private State(int index, int value) { this.index = index; this.value = value; } - + // @@protoc_insertion_point(enum_scope:ReplicationState.State) } - + private int bitField0_; // required .ReplicationState.State state = 1; public static final int STATE_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.State state_; + /** + * required .ReplicationState.State state = 1; + */ public boolean hasState() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .ReplicationState.State state = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.State getState() { return state_; } - + private void initFields() { state_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.State.ENABLED; } @@ -3772,7 +5158,7 @@ public final class ZooKeeperProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasState()) { memoizedIsInitialized = 0; return false; @@ -3780,7 +5166,7 @@ public final class ZooKeeperProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -3789,12 +5175,12 @@ public final class ZooKeeperProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -3804,14 +5190,14 @@ public final class ZooKeeperProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean 
equals(final java.lang.Object obj) { if (obj == this) { @@ -3821,7 +5207,7 @@ public final class ZooKeeperProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState other = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState) obj; - + boolean result = true; result = result && (hasState() == other.hasState()); if (hasState()) { @@ -3832,9 +5218,13 @@ public final class ZooKeeperProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasState()) { @@ -3842,89 +5232,84 @@ public final class ZooKeeperProtos { hash = (53 * hash) + hashEnum(getState()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return 
builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code ReplicationState} + * + *
+     **
+     * Used by replication. Holds whether enabled or disabled
+     * 
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationStateOrBuilder { @@ -3932,18 +5317,21 @@ public final class ZooKeeperProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationState_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationState_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationState_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.class, org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -3954,27 +5342,27 @@ public final class ZooKeeperProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); state_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.State.ENABLED; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationState_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState build() { org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState result = buildPartial(); if (!result.isInitialized()) { @@ -3982,17 +5370,7 @@ public final class ZooKeeperProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState result = new org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState(this); int from_bitField0_ = bitField0_; @@ -4005,7 +5383,7 @@ public final class ZooKeeperProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState) { return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState)other); @@ -4014,7 +5392,7 @@ public final class ZooKeeperProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState other) { if (other == org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.getDefaultInstance()) return this; if (other.hasState()) { @@ -4023,7 +5401,7 @@ public final class ZooKeeperProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasState()) { @@ -4031,55 +5409,43 @@ public final class ZooKeeperProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.State value = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.State.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(1, rawValue); - } else { - bitField0_ |= 0x00000001; - state_ = value; - } - break; - } + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required .ReplicationState.State state = 1; private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.State state_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.State.ENABLED; + /** + * required .ReplicationState.State state = 1; + */ public boolean hasState() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required .ReplicationState.State state = 1; + */ public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.State getState() { return state_; } + /** + * required .ReplicationState.State state = 1; + */ public Builder setState(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.State value) { if (value == null) { throw new NullPointerException(); @@ -4089,70 +5455,157 @@ public final class ZooKeeperProtos { onChanged(); return this; } + /** + * required .ReplicationState.State state = 1; + */ public Builder clearState() { bitField0_ = (bitField0_ & ~0x00000001); state_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.State.ENABLED; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:ReplicationState) } - + static { defaultInstance = new ReplicationState(true); defaultInstance.initFields(); } - + // 
@@protoc_insertion_point(class_scope:ReplicationState) } - + public interface ReplicationHLogPositionOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required int64 position = 1; + /** + * required int64 position = 1; + */ boolean hasPosition(); + /** + * required int64 position = 1; + */ long getPosition(); } + /** + * Protobuf type {@code ReplicationHLogPosition} + * + *
+   **
+   * Used by replication. Holds the current position in an HLog file.
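+   *
+   * Illustrative round trip through the new PARSER; the offset is a made-up
+   * example value:
+   *
+   *   ReplicationHLogPosition pos = ReplicationHLogPosition.newBuilder()
+   *       .setPosition(1024L)
+   *       .build();
+   *   long offset =
+   *       ReplicationHLogPosition.PARSER.parseFrom(pos.toByteArray()).getPosition();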
+   * 
+ */ public static final class ReplicationHLogPosition extends com.google.protobuf.GeneratedMessage implements ReplicationHLogPositionOrBuilder { // Use ReplicationHLogPosition.newBuilder() to construct. - private ReplicationHLogPosition(Builder builder) { + private ReplicationHLogPosition(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private ReplicationHLogPosition(boolean noInit) {} - + private ReplicationHLogPosition(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final ReplicationHLogPosition defaultInstance; public static ReplicationHLogPosition getDefaultInstance() { return defaultInstance; } - + public ReplicationHLogPosition getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ReplicationHLogPosition( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + position_ = input.readInt64(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationHLogPosition_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationHLogPosition_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationHLogPosition_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition.class, org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition.Builder.class); } - + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public ReplicationHLogPosition parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ReplicationHLogPosition(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + private int bitField0_; // required int64 position = 1; public static final int POSITION_FIELD_NUMBER = 1; private long position_; + /** + * required int64 
position = 1; + */ public boolean hasPosition() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required int64 position = 1; + */ public long getPosition() { return position_; } - + private void initFields() { position_ = 0L; } @@ -4160,7 +5613,7 @@ public final class ZooKeeperProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasPosition()) { memoizedIsInitialized = 0; return false; @@ -4168,7 +5621,7 @@ public final class ZooKeeperProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -4177,12 +5630,12 @@ public final class ZooKeeperProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -4192,14 +5645,14 @@ public final class ZooKeeperProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -4209,7 +5662,7 @@ public final class ZooKeeperProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition other = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition) obj; - + boolean result = true; result = result && (hasPosition() == other.hasPosition()); if (hasPosition()) { @@ -4220,9 +5673,13 @@ public final class ZooKeeperProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasPosition()) { @@ -4230,89 +5687,84 @@ public final class ZooKeeperProtos { hash = (53 * hash) + hashLong(getPosition()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) 
throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code ReplicationHLogPosition} + * + *
+     **
+     * Used by replication. Holds the current position in an HLog file.
+     * 
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPositionOrBuilder { @@ -4320,18 +5772,21 @@ public final class ZooKeeperProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationHLogPosition_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationHLogPosition_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationHLogPosition_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition.class, org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -4342,27 +5797,27 @@ public final class ZooKeeperProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); position_ = 0L; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationHLogPosition_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition build() { org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition result = buildPartial(); if (!result.isInitialized()) { @@ -4370,17 +5825,7 @@ public final class ZooKeeperProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition result = new org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition(this); int from_bitField0_ = bitField0_; @@ -4393,7 +5838,7 @@ public final class ZooKeeperProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition) { return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition)other); @@ -4402,7 +5847,7 @@ public final class ZooKeeperProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition other) { if (other == org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition.getDefaultInstance()) return this; if (other.hasPosition()) { @@ -4411,7 +5856,7 @@ public final class ZooKeeperProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasPosition()) { @@ -4419,141 +5864,232 @@ public final class ZooKeeperProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - position_ = input.readInt64(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required int64 position = 1; private long position_ ; + /** + * required int64 position = 1; + */ public boolean hasPosition() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required int64 position = 1; + */ public long getPosition() { return position_; } + /** + * required int64 position = 1; + */ public Builder setPosition(long value) { bitField0_ |= 0x00000001; position_ = value; onChanged(); return this; } + /** + * required int64 position = 1; + */ public Builder clearPosition() { bitField0_ = (bitField0_ & ~0x00000001); position_ = 0L; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:ReplicationHLogPosition) } - + static { defaultInstance = new ReplicationHLogPosition(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:ReplicationHLogPosition) } - + public interface ReplicationLockOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required string lockOwner = 1; + /** + * required string lockOwner = 1; + */ boolean hasLockOwner(); - String getLockOwner(); + /** + * required string lockOwner = 1; + */ + java.lang.String getLockOwner(); + /** + * required string lockOwner = 1; + */ + com.google.protobuf.ByteString + getLockOwnerBytes(); } + /** + * Protobuf type {@code ReplicationLock} + * + *
+   **
+   * Used by replication. Used to lock a region server during failover.
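+   *
+   * Illustrative sketch using the standard generated setter (the owner string
+   * is a hypothetical server name, not from the patch):
+   *
+   *   ReplicationLock lock = ReplicationLock.newBuilder()
+   *       .setLockOwner("rs1.example.com,60020,1372002729999")
+   *       .build();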
+   * 
+ */ public static final class ReplicationLock extends com.google.protobuf.GeneratedMessage implements ReplicationLockOrBuilder { // Use ReplicationLock.newBuilder() to construct. - private ReplicationLock(Builder builder) { + private ReplicationLock(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private ReplicationLock(boolean noInit) {} - + private ReplicationLock(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final ReplicationLock defaultInstance; public static ReplicationLock getDefaultInstance() { return defaultInstance; } - + public ReplicationLock getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ReplicationLock( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + lockOwner_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationLock_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationLock_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationLock_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock.class, org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public ReplicationLock parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ReplicationLock(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required string lockOwner = 1; public static final int LOCKOWNER_FIELD_NUMBER = 1; private java.lang.Object lockOwner_; + /** + * required string lockOwner = 1; + */ public boolean hasLockOwner() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String 
getLockOwner() { + /** + * required string lockOwner = 1; + */ + public java.lang.String getLockOwner() { java.lang.Object ref = lockOwner_; - if (ref instanceof String) { - return (String) ref; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { lockOwner_ = s; } return s; } } - private com.google.protobuf.ByteString getLockOwnerBytes() { + /** + * required string lockOwner = 1; + */ + public com.google.protobuf.ByteString + getLockOwnerBytes() { java.lang.Object ref = lockOwner_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); lockOwner_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + private void initFields() { lockOwner_ = ""; } @@ -4561,7 +6097,7 @@ public final class ZooKeeperProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasLockOwner()) { memoizedIsInitialized = 0; return false; @@ -4569,7 +6105,7 @@ public final class ZooKeeperProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -4578,12 +6114,12 @@ public final class ZooKeeperProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -4593,14 +6129,14 @@ public final class ZooKeeperProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -4610,7 +6146,7 @@ public final class ZooKeeperProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock other = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock) obj; - + boolean result = true; result = result && (hasLockOwner() == other.hasLockOwner()); if (hasLockOwner()) { @@ -4621,9 +6157,13 @@ public final class ZooKeeperProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasLockOwner()) { @@ -4631,89 +6171,84 @@ public final class ZooKeeperProtos { hash = (53 * hash) + getLockOwner().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return 
PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new 
Builder(parent); return builder; } + /** + * Protobuf type {@code ReplicationLock} + * + *
+     **
+     * Used by replication to lock a region server during failover.
+     * 
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLockOrBuilder { @@ -4721,18 +6256,21 @@ public final class ZooKeeperProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationLock_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationLock_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationLock_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock.class, org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -4743,27 +6281,27 @@ public final class ZooKeeperProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); lockOwner_ = ""; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationLock_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock build() { org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock result = buildPartial(); if (!result.isInitialized()) { @@ -4771,17 +6309,7 @@ public final class ZooKeeperProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock result = new org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock(this); int from_bitField0_ = bitField0_; @@ -4794,7 +6322,7 @@ public final class ZooKeeperProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock)other); @@ -4803,16 +6331,18 @@ public final class ZooKeeperProtos { return 
this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock other) { if (other == org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock.getDefaultInstance()) return this; if (other.hasLockOwner()) { - setLockOwner(other.getLockOwner()); + bitField0_ |= 0x00000001; + lockOwner_ = other.lockOwner_; + onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasLockOwner()) { @@ -4820,57 +6350,69 @@ public final class ZooKeeperProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - lockOwner_ = input.readBytes(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required string lockOwner = 1; private java.lang.Object lockOwner_ = ""; + /** + * required string lockOwner = 1; + */ public boolean hasLockOwner() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getLockOwner() { + /** + * required string lockOwner = 1; + */ + public java.lang.String getLockOwner() { java.lang.Object ref = lockOwner_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); lockOwner_ = s; return s; } else { - return (String) ref; + return (java.lang.String) ref; + } + } + /** + * required string lockOwner = 1; + */ + public com.google.protobuf.ByteString + getLockOwnerBytes() { + java.lang.Object ref = lockOwner_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + lockOwner_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; } } - public Builder setLockOwner(String value) { + /** + * required string lockOwner = 1; + */ + public Builder setLockOwner( + java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ -4879,157 +6421,344 @@ public final class ZooKeeperProtos { onChanged(); return this; } + /** + * required string lockOwner = 1; + */ public Builder clearLockOwner() { bitField0_ = (bitField0_ & ~0x00000001); lockOwner_ = getDefaultInstance().getLockOwner(); onChanged(); return this; } - void setLockOwner(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; + /** + * required string lockOwner = 1; + */ + public Builder 
setLockOwnerBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; lockOwner_ = value; onChanged(); + return this; } - + // @@protoc_insertion_point(builder_scope:ReplicationLock) } - + static { defaultInstance = new ReplicationLock(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:ReplicationLock) } - + public interface TableLockOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // optional bytes tableName = 1; + /** + * optional bytes tableName = 1; + */ boolean hasTableName(); + /** + * optional bytes tableName = 1; + */ com.google.protobuf.ByteString getTableName(); - + // optional .ServerName lockOwner = 2; + /** + * optional .ServerName lockOwner = 2; + */ boolean hasLockOwner(); + /** + * optional .ServerName lockOwner = 2; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getLockOwner(); + /** + * optional .ServerName lockOwner = 2; + */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getLockOwnerOrBuilder(); - + // optional int64 threadId = 3; + /** + * optional int64 threadId = 3; + */ boolean hasThreadId(); + /** + * optional int64 threadId = 3; + */ long getThreadId(); - + // optional bool isShared = 4; + /** + * optional bool isShared = 4; + */ boolean hasIsShared(); + /** + * optional bool isShared = 4; + */ boolean getIsShared(); - + // optional string purpose = 5; + /** + * optional string purpose = 5; + */ boolean hasPurpose(); - String getPurpose(); + /** + * optional string purpose = 5; + */ + java.lang.String getPurpose(); + /** + * optional string purpose = 5; + */ + com.google.protobuf.ByteString + getPurposeBytes(); } + /** + * Protobuf type {@code TableLock} + * + *
+   **
+   * Metadata associated with a table lock in ZooKeeper
+   * 
+ */ public static final class TableLock extends com.google.protobuf.GeneratedMessage implements TableLockOrBuilder { // Use TableLock.newBuilder() to construct. - private TableLock(Builder builder) { + private TableLock(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private TableLock(boolean noInit) {} - + private TableLock(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final TableLock defaultInstance; public static TableLock getDefaultInstance() { return defaultInstance; } - + public TableLock getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private TableLock( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + tableName_ = input.readBytes(); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = null; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + subBuilder = lockOwner_.toBuilder(); + } + lockOwner_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(lockOwner_); + lockOwner_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000002; + break; + } + case 24: { + bitField0_ |= 0x00000004; + threadId_ = input.readInt64(); + break; + } + case 32: { + bitField0_ |= 0x00000008; + isShared_ = input.readBool(); + break; + } + case 42: { + bitField0_ |= 0x00000010; + purpose_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_TableLock_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_TableLock_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_TableLock_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock.class, org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public TableLock parsePartialFrom( + 
com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new TableLock(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // optional bytes tableName = 1; public static final int TABLENAME_FIELD_NUMBER = 1; private com.google.protobuf.ByteString tableName_; + /** + * optional bytes tableName = 1; + */ public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional bytes tableName = 1; + */ public com.google.protobuf.ByteString getTableName() { return tableName_; } - + // optional .ServerName lockOwner = 2; public static final int LOCKOWNER_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName lockOwner_; + /** + * optional .ServerName lockOwner = 2; + */ public boolean hasLockOwner() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional .ServerName lockOwner = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getLockOwner() { return lockOwner_; } + /** + * optional .ServerName lockOwner = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getLockOwnerOrBuilder() { return lockOwner_; } - + // optional int64 threadId = 3; public static final int THREADID_FIELD_NUMBER = 3; private long threadId_; + /** + * optional int64 threadId = 3; + */ public boolean hasThreadId() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional int64 threadId = 3; + */ public long getThreadId() { return threadId_; } - + // optional bool isShared = 4; public static final int ISSHARED_FIELD_NUMBER = 4; private boolean isShared_; + /** + * optional bool isShared = 4; + */ public boolean hasIsShared() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * optional bool isShared = 4; + */ public boolean getIsShared() { return isShared_; } - + // optional string purpose = 5; public static final int PURPOSE_FIELD_NUMBER = 5; private java.lang.Object purpose_; + /** + * optional string purpose = 5; + */ public boolean hasPurpose() { return ((bitField0_ & 0x00000010) == 0x00000010); } - public String getPurpose() { + /** + * optional string purpose = 5; + */ + public java.lang.String getPurpose() { java.lang.Object ref = purpose_; - if (ref instanceof String) { - return (String) ref; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { purpose_ = s; } return s; } } - private com.google.protobuf.ByteString getPurposeBytes() { + /** + * optional string purpose = 5; + */ + public com.google.protobuf.ByteString + getPurposeBytes() { java.lang.Object ref = purpose_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); purpose_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + private void initFields() { tableName_ = com.google.protobuf.ByteString.EMPTY; lockOwner_ = 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); @@ -5041,7 +6770,7 @@ public final class ZooKeeperProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (hasLockOwner()) { if (!getLockOwner().isInitialized()) { memoizedIsInitialized = 0; @@ -5051,7 +6780,7 @@ public final class ZooKeeperProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -5072,12 +6801,12 @@ public final class ZooKeeperProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -5103,14 +6832,14 @@ public final class ZooKeeperProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -5120,7 +6849,7 @@ public final class ZooKeeperProtos { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock other = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock) obj; - + boolean result = true; result = result && (hasTableName() == other.hasTableName()); if (hasTableName()) { @@ -5151,9 +6880,13 @@ public final class ZooKeeperProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasTableName()) { @@ -5177,89 +6910,84 @@ public final class ZooKeeperProtos { hash = (53 * hash) + getPurpose().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static 
org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code TableLock} + * + *
+     **
+     * Metadata associated with a table lock in ZooKeeper
+     * 
+ */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLockOrBuilder { @@ -5267,18 +6995,21 @@ public final class ZooKeeperProtos { getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_TableLock_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_TableLock_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_TableLock_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock.class, org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock.Builder.class); } - + // Construct using org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -5290,7 +7021,7 @@ public final class ZooKeeperProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); tableName_ = com.google.protobuf.ByteString.EMPTY; @@ -5309,20 +7040,20 @@ public final class ZooKeeperProtos { bitField0_ = (bitField0_ & ~0x00000010); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_TableLock_descriptor; } - + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock.getDefaultInstance(); } - + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock build() { org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock result = buildPartial(); if (!result.isInitialized()) { @@ -5330,17 +7061,7 @@ public final class ZooKeeperProtos { } return result; } - - private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock result = new org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock(this); int from_bitField0_ = bitField0_; @@ -5373,7 +7094,7 @@ public final class ZooKeeperProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock)other); @@ -5382,7 +7103,7 @@ public final class ZooKeeperProtos { return this; } } - + public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock other) { if (other == org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock.getDefaultInstance()) return this; if (other.hasTableName()) { @@ -5398,12 +7119,14 @@ public final class ZooKeeperProtos { setIsShared(other.getIsShared()); } if (other.hasPurpose()) { - setPurpose(other.getPurpose()); + bitField0_ |= 0x00000010; + purpose_ = other.purpose_; + onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (hasLockOwner()) { if (!getLockOwner().isInitialized()) { @@ -5413,73 +7136,43 @@ public final class ZooKeeperProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - tableName_ = input.readBytes(); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(); - if (hasLockOwner()) { - subBuilder.mergeFrom(getLockOwner()); - } - input.readMessage(subBuilder, extensionRegistry); - setLockOwner(subBuilder.buildPartial()); - break; - } - case 24: { - bitField0_ |= 0x00000004; - threadId_ = input.readInt64(); - break; - } - case 32: { - bitField0_ |= 0x00000008; - isShared_ = input.readBool(); - break; - } - case 42: { - bitField0_ |= 0x00000010; - purpose_ = input.readBytes(); - break; - } + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // optional bytes tableName = 1; private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes tableName = 1; + */ public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * optional bytes tableName = 1; + */ public com.google.protobuf.ByteString getTableName() { return tableName_; } + /** + * optional bytes tableName = 1; + */ public Builder setTableName(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -5489,20 +7182,29 @@ public final class ZooKeeperProtos { onChanged(); return this; } + /** + * optional bytes tableName = 1; + */ public Builder clearTableName() { bitField0_ = (bitField0_ & ~0x00000001); tableName_ = getDefaultInstance().getTableName(); onChanged(); return this; } - + // optional .ServerName lockOwner = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName lockOwner_ = 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> lockOwnerBuilder_; + /** + * optional .ServerName lockOwner = 2; + */ public boolean hasLockOwner() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional .ServerName lockOwner = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getLockOwner() { if (lockOwnerBuilder_ == null) { return lockOwner_; @@ -5510,6 +7212,9 @@ public final class ZooKeeperProtos { return lockOwnerBuilder_.getMessage(); } } + /** + * optional .ServerName lockOwner = 2; + */ public Builder setLockOwner(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { if (lockOwnerBuilder_ == null) { if (value == null) { @@ -5523,6 +7228,9 @@ public final class ZooKeeperProtos { bitField0_ |= 0x00000002; return this; } + /** + * optional .ServerName lockOwner = 2; + */ public Builder setLockOwner( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { if (lockOwnerBuilder_ == null) { @@ -5534,6 +7242,9 @@ public final class ZooKeeperProtos { bitField0_ |= 0x00000002; return this; } + /** + * optional .ServerName lockOwner = 2; + */ public Builder mergeLockOwner(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { if (lockOwnerBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && @@ -5550,6 +7261,9 @@ public final class ZooKeeperProtos { bitField0_ |= 0x00000002; return this; } + /** + * optional .ServerName lockOwner = 2; + */ public Builder clearLockOwner() { if (lockOwnerBuilder_ == null) { lockOwner_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); @@ -5560,11 +7274,17 @@ public final class ZooKeeperProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } + /** + * optional .ServerName lockOwner = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getLockOwnerBuilder() { bitField0_ |= 0x00000002; onChanged(); return getLockOwnerFieldBuilder().getBuilder(); } + /** + * optional .ServerName lockOwner = 2; + */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getLockOwnerOrBuilder() { if (lockOwnerBuilder_ != null) { return lockOwnerBuilder_.getMessageOrBuilder(); @@ -5572,6 +7292,9 @@ public final class ZooKeeperProtos { return lockOwner_; } } + /** + * optional .ServerName lockOwner = 2; + */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getLockOwnerFieldBuilder() { @@ -5585,65 +7308,116 @@ public final class ZooKeeperProtos { } return lockOwnerBuilder_; } - + // optional int64 threadId = 3; private long threadId_ ; + /** + * optional int64 threadId = 3; + */ public boolean hasThreadId() { return ((bitField0_ & 0x00000004) == 0x00000004); } + /** + * optional int64 threadId = 3; + */ public long getThreadId() { return threadId_; } + /** + * optional int64 threadId = 3; + */ public Builder setThreadId(long value) { bitField0_ |= 0x00000004; threadId_ = value; onChanged(); return this; } + /** + * optional 
int64 threadId = 3; + */ public Builder clearThreadId() { bitField0_ = (bitField0_ & ~0x00000004); threadId_ = 0L; onChanged(); return this; } - + // optional bool isShared = 4; private boolean isShared_ ; + /** + * optional bool isShared = 4; + */ public boolean hasIsShared() { return ((bitField0_ & 0x00000008) == 0x00000008); } + /** + * optional bool isShared = 4; + */ public boolean getIsShared() { return isShared_; } + /** + * optional bool isShared = 4; + */ public Builder setIsShared(boolean value) { bitField0_ |= 0x00000008; isShared_ = value; onChanged(); return this; } + /** + * optional bool isShared = 4; + */ public Builder clearIsShared() { bitField0_ = (bitField0_ & ~0x00000008); isShared_ = false; onChanged(); return this; } - + // optional string purpose = 5; private java.lang.Object purpose_ = ""; + /** + * optional string purpose = 5; + */ public boolean hasPurpose() { return ((bitField0_ & 0x00000010) == 0x00000010); } - public String getPurpose() { + /** + * optional string purpose = 5; + */ + public java.lang.String getPurpose() { java.lang.Object ref = purpose_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); purpose_ = s; return s; } else { - return (String) ref; + return (java.lang.String) ref; + } + } + /** + * optional string purpose = 5; + */ + public com.google.protobuf.ByteString + getPurposeBytes() { + java.lang.Object ref = purpose_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + purpose_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; } } - public Builder setPurpose(String value) { + /** + * optional string purpose = 5; + */ + public Builder setPurpose( + java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ -5652,29 +7426,40 @@ public final class ZooKeeperProtos { onChanged(); return this; } + /** + * optional string purpose = 5; + */ public Builder clearPurpose() { bitField0_ = (bitField0_ & ~0x00000010); purpose_ = getDefaultInstance().getPurpose(); onChanged(); return this; } - void setPurpose(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000010; + /** + * optional string purpose = 5; + */ + public Builder setPurposeBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000010; purpose_ = value; onChanged(); + return this; } - + // @@protoc_insertion_point(builder_scope:TableLock) } - + static { defaultInstance = new TableLock(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:TableLock) } - + private static com.google.protobuf.Descriptors.Descriptor internal_static_RootRegionServer_descriptor; private static @@ -5730,7 +7515,7 @@ public final class ZooKeeperProtos { private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_TableLock_fieldAccessorTable; - + public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; @@ -5774,89 +7559,67 @@ public final class ZooKeeperProtos { internal_static_RootRegionServer_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_RootRegionServer_descriptor, - new java.lang.String[] { "Server", }, - 
org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer.class, - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer.Builder.class); + new java.lang.String[] { "Server", }); internal_static_Master_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_Master_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_Master_descriptor, - new java.lang.String[] { "Master", }, - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master.class, - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master.Builder.class); + new java.lang.String[] { "Master", }); internal_static_ClusterUp_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_ClusterUp_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ClusterUp_descriptor, - new java.lang.String[] { "StartDate", }, - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp.class, - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp.Builder.class); + new java.lang.String[] { "StartDate", }); internal_static_RegionTransition_descriptor = getDescriptor().getMessageTypes().get(3); internal_static_RegionTransition_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_RegionTransition_descriptor, - new java.lang.String[] { "EventTypeCode", "RegionName", "CreateTime", "ServerName", "Payload", }, - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition.class, - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition.Builder.class); + new java.lang.String[] { "EventTypeCode", "RegionName", "CreateTime", "ServerName", "Payload", }); internal_static_SplitLogTask_descriptor = getDescriptor().getMessageTypes().get(4); internal_static_SplitLogTask_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_SplitLogTask_descriptor, - new java.lang.String[] { "State", "ServerName", }, - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.class, - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.Builder.class); + new java.lang.String[] { "State", "ServerName", }); internal_static_Table_descriptor = getDescriptor().getMessageTypes().get(5); internal_static_Table_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_Table_descriptor, - new java.lang.String[] { "State", }, - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.class, - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.Builder.class); + new java.lang.String[] { "State", }); internal_static_ReplicationPeer_descriptor = getDescriptor().getMessageTypes().get(6); internal_static_ReplicationPeer_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ReplicationPeer_descriptor, - new java.lang.String[] { "Clusterkey", }, - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer.class, - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer.Builder.class); + new java.lang.String[] { "Clusterkey", }); internal_static_ReplicationState_descriptor = getDescriptor().getMessageTypes().get(7); internal_static_ReplicationState_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ReplicationState_descriptor, - new java.lang.String[] { "State", }, - 
org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.class, - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.Builder.class); + new java.lang.String[] { "State", }); internal_static_ReplicationHLogPosition_descriptor = getDescriptor().getMessageTypes().get(8); internal_static_ReplicationHLogPosition_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ReplicationHLogPosition_descriptor, - new java.lang.String[] { "Position", }, - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition.class, - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition.Builder.class); + new java.lang.String[] { "Position", }); internal_static_ReplicationLock_descriptor = getDescriptor().getMessageTypes().get(9); internal_static_ReplicationLock_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ReplicationLock_descriptor, - new java.lang.String[] { "LockOwner", }, - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock.class, - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock.Builder.class); + new java.lang.String[] { "LockOwner", }); internal_static_TableLock_descriptor = getDescriptor().getMessageTypes().get(10); internal_static_TableLock_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_TableLock_descriptor, - new java.lang.String[] { "TableName", "LockOwner", "ThreadId", "IsShared", "Purpose", }, - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock.class, - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock.Builder.class); + new java.lang.String[] { "TableName", "LockOwner", "ThreadId", "IsShared", "Purpose", }); return null; } }; @@ -5866,6 +7629,6 @@ public final class ZooKeeperProtos { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(), }, assigner); } - + // @@protoc_insertion_point(outer_class_scope) } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseServer.java index 6cbda3b..b603db5 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseServer.java @@ -119,7 +119,7 @@ import com.google.common.base.Function; import com.google.common.util.concurrent.ThreadFactoryBuilder; import com.google.protobuf.CodedInputStream; import com.google.protobuf.Message; -import com.google.protobuf.Message.Builder; +import com.google.protobuf.Parser; import com.google.protobuf.TextFormat; // Uses Writables doing sasl @@ -1673,8 +1673,7 @@ public abstract class HBaseServer implements RpcServer { CodedInputStream cis = CodedInputStream.newInstance(buf, offset, buf.length); int headerSize = cis.readRawVarint32(); offset = cis.getTotalBytesRead(); - RequestHeader header = - RequestHeader.newBuilder().mergeFrom(buf, offset, headerSize).build(); + RequestHeader header = RequestHeader.PARSER.parseFrom(buf, offset, headerSize); offset += headerSize; int id = header.getCallId(); if (LOG.isDebugEnabled()) { @@ -1701,15 +1700,13 @@ public abstract class HBaseServer implements RpcServer { Message m = methodCache.getMethodArgType(method); // Check that there is a param to deserialize. 
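// Editorial sketch, not part of the original patch: the HBaseServer hunk here
// swaps the protobuf 2.4 idiom (allocate a Message.Builder, mergeFrom() the
// byte slice, then build()) for the 2.5 Parser obtained via getParserForType().
// A minimal, self-contained illustration of that technique follows; the method
// name demoParseParam and its arguments are hypothetical, not HBase APIs.
//
// import com.google.protobuf.InvalidProtocolBufferException;
// import com.google.protobuf.Message;
// import com.google.protobuf.Parser;
static Message demoParseParam(Message prototype, byte[] buf, int offset, int len)
    throws InvalidProtocolBufferException {
  // 2.5 style: every generated message exposes a singleton Parser, so parsing
  // a sub-range of a byte[] needs no intermediate Builder allocation, unlike
  // 2.4's prototype.newBuilderForType().mergeFrom(buf, offset, len).build().
  Parser<? extends Message> parser = prototype.getParserForType();
  return parser.parseFrom(buf, offset, len);
}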
if (m != null) { - Builder builder = null; - builder = m.newBuilderForType(); + Parser parser = m.getParserForType(); // To read the varint, I need an inputstream; might as well be a CIS. cis = CodedInputStream.newInstance(buf, offset, buf.length); int paramSize = cis.readRawVarint32(); offset += cis.getTotalBytesRead(); - if (builder != null) { - builder.mergeFrom(buf, offset, paramSize); - param = builder.build(); + if (parser != null) { + param = parser.parseFrom(buf, offset, paramSize); } offset += paramSize; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java index 9958a0f..79d00e6 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java @@ -184,7 +184,6 @@ import com.google.protobuf.Service; @InterfaceAudience.Private public class HRegion implements HeapSize { // , Writable{ public static final Log LOG = LogFactory.getLog(HRegion.class); - private static final String MERGEDIR = ".merges"; public static final String LOAD_CFS_ON_DEMAND_CONFIG_KEY = "hbase.hregion.scan.loadColumnFamiliesOnDemand"; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationProtos.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationProtos.java index a9133aa..61b47ff 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationProtos.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationProtos.java @@ -10,64 +10,160 @@ public final class ColumnAggregationProtos { } public interface SumRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required bytes family = 1; + /** + * required bytes family = 1; + */ boolean hasFamily(); + /** + * required bytes family = 1; + */ com.google.protobuf.ByteString getFamily(); - + // optional bytes qualifier = 2; + /** + * optional bytes qualifier = 2; + */ boolean hasQualifier(); + /** + * optional bytes qualifier = 2; + */ com.google.protobuf.ByteString getQualifier(); } + /** + * Protobuf type {@code SumRequest} + */ public static final class SumRequest extends com.google.protobuf.GeneratedMessage implements SumRequestOrBuilder { // Use SumRequest.newBuilder() to construct. 
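// Editorial sketch, not part of the original patch: the generated SumRequest
// below (like ReplicationLock and TableLock earlier in this patch) gains a
// private (CodedInputStream, ExtensionRegistryLite) constructor in which
// protobuf 2.5 parses eagerly with a tag loop. A hand-rolled reduction of that
// loop for a message with one bytes field; demoReadFamily is a hypothetical
// name, and this sketch skips unknown fields instead of storing them.
//
// import com.google.protobuf.ByteString;
// import com.google.protobuf.CodedInputStream;
static ByteString demoReadFamily(CodedInputStream input) throws java.io.IOException {
  ByteString family = ByteString.EMPTY;
  boolean done = false;
  while (!done) {
    int tag = input.readTag();        // tag = (fieldNumber << 3) | wireType
    switch (tag) {
      case 0:                         // zero tag: end of the input stream
        done = true;
        break;
      case 10:                        // field 1, wire type 2 => bytes family
        family = input.readBytes();
        break;
      default:                        // unknown field: skipped here, whereas
        if (!input.skipField(tag)) {  // the generated constructor records it
          done = true;                // in an UnknownFieldSet; skipField()
        }                             // returns false on an end-group tag
        break;
    }
  }
  return family;
}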
- private SumRequest(Builder builder) { + private SumRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private SumRequest(boolean noInit) {} - + private SumRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final SumRequest defaultInstance; public static SumRequest getDefaultInstance() { return defaultInstance; } - + public SumRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private SumRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + family_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + qualifier_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public SumRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new SumRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required bytes family = 1; public static final int FAMILY_FIELD_NUMBER = 1; private com.google.protobuf.ByteString family_; + /** + * required bytes family = 1; + */ public boolean hasFamily() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes family = 1; + */ public com.google.protobuf.ByteString getFamily() { 
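// Note (added commentary): bytes fields such as family_ hand back the stored
// ByteString as-is; only string fields carry the lazy ByteString-to-String
// decode-and-cache logic seen in getLockOwner()/getPurpose() above.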
return family_; } - + // optional bytes qualifier = 2; public static final int QUALIFIER_FIELD_NUMBER = 2; private com.google.protobuf.ByteString qualifier_; + /** + * optional bytes qualifier = 2; + */ public boolean hasQualifier() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional bytes qualifier = 2; + */ public com.google.protobuf.ByteString getQualifier() { return qualifier_; } - + private void initFields() { family_ = com.google.protobuf.ByteString.EMPTY; qualifier_ = com.google.protobuf.ByteString.EMPTY; @@ -76,7 +172,7 @@ public final class ColumnAggregationProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasFamily()) { memoizedIsInitialized = 0; return false; @@ -84,7 +180,7 @@ public final class ColumnAggregationProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -96,12 +192,12 @@ public final class ColumnAggregationProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -115,14 +211,14 @@ public final class ColumnAggregationProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -132,7 +228,7 @@ public final class ColumnAggregationProtos { return super.equals(obj); } org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest) obj; - + boolean result = true; result = result && (hasFamily() == other.hasFamily()); if (hasFamily()) { @@ -148,9 +244,13 @@ public final class ColumnAggregationProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasFamily()) { @@ -162,89 +262,79 @@ public final class ColumnAggregationProtos { hash = (53 * hash) + getQualifier().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code SumRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequestOrBuilder { @@ -252,18 +342,21 
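All of the static parseFrom overloads now delegate to the generated PARSER (typed Parser&lt;SumRequest&gt; in unescaped protoc output) instead of the builder-plus-buildParsed path. A minimal caller-side sketch of the two styles, assuming the generated class from this patch is on the classpath; both throw InvalidProtocolBufferException on malformed input:

    import com.google.protobuf.ByteString;
    import com.google.protobuf.InvalidProtocolBufferException;
    import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest;

    public class ParserMigrationSketch {
      public static void main(String[] args) throws InvalidProtocolBufferException {
        byte[] data = SumRequest.newBuilder()
            .setFamily(ByteString.copyFromUtf8("cf"))
            .build().toByteArray();
        // 2.4 style: merge into a fresh builder, then build.
        SumRequest viaBuilder = SumRequest.newBuilder().mergeFrom(data).build();
        // 2.5 style: the one-shot static parser generated in this patch.
        SumRequest viaParser = SumRequest.PARSER.parseFrom(data);
        System.out.println(viaBuilder.equals(viaParser)); // true
      }
    }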
@@ public final class ColumnAggregationProtos { getDescriptor() { return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -274,7 +367,7 @@ public final class ColumnAggregationProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); family_ = com.google.protobuf.ByteString.EMPTY; @@ -283,20 +376,20 @@ public final class ColumnAggregationProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest.getDescriptor(); + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumRequest_descriptor; } - + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest build() { org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest result = buildPartial(); if (!result.isInitialized()) { @@ -304,17 +397,7 @@ public final class ColumnAggregationProtos { } return result; } - - private org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest buildPartial() { org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest(this); int from_bitField0_ = bitField0_; @@ -331,7 +414,7 @@ public final class ColumnAggregationProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest) { return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest)other); @@ -340,7 +423,7 @@ public final class ColumnAggregationProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest other) { if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest.getDefaultInstance()) return this; if (other.hasFamily()) { @@ -352,7 +435,7 @@ public final class ColumnAggregationProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasFamily()) { @@ -360,54 +443,43 @@ public final class ColumnAggregationProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - family_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - qualifier_ = input.readBytes(); - break; - } + org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required bytes family = 1; private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes family = 1; + */ public boolean hasFamily() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes family = 1; + */ public com.google.protobuf.ByteString getFamily() { return family_; } + /** + * required bytes family = 1; + */ public Builder setFamily(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -417,21 +489,33 @@ public final class ColumnAggregationProtos { onChanged(); return this; } + /** + * required bytes family = 1; + */ public Builder clearFamily() { bitField0_ = (bitField0_ & ~0x00000001); family_ = getDefaultInstance().getFamily(); onChanged(); return this; } - + // optional bytes qualifier = 2; private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes qualifier = 2; + */ public boolean hasQualifier() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * optional bytes qualifier = 2; + */ public com.google.protobuf.ByteString getQualifier() { return qualifier_; } + /** + * optional bytes qualifier = 2; + */ public Builder setQualifier(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -441,70 
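The rewritten Builder.mergeFrom(CodedInputStream) funnels through PARSER.parsePartialFrom and, in its finally block, still merges whatever was decoded before a failure. Callers can reach that partial state through getUnfinishedMessage(), new in protobuf 2.5. A sketch under that assumption, using a deliberately truncated buffer:

    import com.google.protobuf.InvalidProtocolBufferException;
    import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest;

    public class UnfinishedMessageSketch {
      public static void main(String[] args) {
        // Tag 0x0a (field 1, bytes) announcing 5 payload bytes but supplying one.
        byte[] truncated = {0x0a, 0x05, 0x66};
        try {
          SumRequest.PARSER.parseFrom(truncated);
        } catch (InvalidProtocolBufferException e) {
          // setUnfinishedMessage(this) in the parsing constructor preserved the
          // partially decoded message; it may be null on other failure paths.
          Object partial = e.getUnfinishedMessage();
          System.out.println("partial message: " + partial);
        }
      }
    }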
+525,152 @@ public final class ColumnAggregationProtos { onChanged(); return this; } + /** + * optional bytes qualifier = 2; + */ public Builder clearQualifier() { bitField0_ = (bitField0_ & ~0x00000002); qualifier_ = getDefaultInstance().getQualifier(); onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:SumRequest) } - + static { defaultInstance = new SumRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:SumRequest) } - + public interface SumResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required int64 sum = 1; + /** + * required int64 sum = 1; + */ boolean hasSum(); + /** + * required int64 sum = 1; + */ long getSum(); } + /** + * Protobuf type {@code SumResponse} + */ public static final class SumResponse extends com.google.protobuf.GeneratedMessage implements SumResponseOrBuilder { // Use SumResponse.newBuilder() to construct. - private SumResponse(Builder builder) { + private SumResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private SumResponse(boolean noInit) {} - + private SumResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final SumResponse defaultInstance; public static SumResponse getDefaultInstance() { return defaultInstance; } - + public SumResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private SumResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + sum_ = input.readInt64(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse.class, 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public SumResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new SumResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required int64 sum = 1; public static final int SUM_FIELD_NUMBER = 1; private long sum_; + /** + * required int64 sum = 1; + */ public boolean hasSum() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required int64 sum = 1; + */ public long getSum() { return sum_; } - + private void initFields() { sum_ = 0L; } @@ -512,7 +678,7 @@ public final class ColumnAggregationProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasSum()) { memoizedIsInitialized = 0; return false; @@ -520,7 +686,7 @@ public final class ColumnAggregationProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -529,12 +695,12 @@ public final class ColumnAggregationProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -544,14 +710,14 @@ public final class ColumnAggregationProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -561,7 +727,7 @@ public final class ColumnAggregationProtos { return super.equals(obj); } org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse) obj; - + boolean result = true; result = result && (hasSum() == other.hasSum()); if (hasSum()) { @@ -572,9 +738,13 @@ public final class ColumnAggregationProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasSum()) { @@ -582,89 +752,79 @@ public final class ColumnAggregationProtos { hash = (53 * hash) + hashLong(getSum()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse 
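SumResponse picks up the same memoized hashCode guard as SumRequest. The idiom is safe here because the message is immutable: any threads racing on the field compute and store the identical value, and 0 doubles as the unset sentinel (a genuine hash of 0 would simply be recomputed). A stripped-down sketch of the pattern, with computeHash standing in for the field mixing in the generated code:

    final class MemoizedHashSketch {
      private int memoizedHashCode = 0; // 0 means "not computed yet"

      @Override
      public int hashCode() {
        if (memoizedHashCode != 0) {
          return memoizedHashCode; // benign race: all writers store the same int
        }
        int hash = computeHash();
        memoizedHashCode = hash;
        return hash;
      }

      // Placeholder for the (19 * hash + field) mixing in the generated code.
      private int computeHash() {
        return 41 * 19 + 53;
      }

      public static void main(String[] args) {
        MemoizedHashSketch m = new MemoizedHashSketch();
        System.out.println(m.hashCode() == m.hashCode()); // true, second call is cached
      }
    }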
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { 
Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code SumResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponseOrBuilder { @@ -672,18 +832,21 @@ public final class ColumnAggregationProtos { getDescriptor() { return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -694,27 +857,27 @@ public final class ColumnAggregationProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); sum_ = 0L; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse.getDescriptor(); + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumResponse_descriptor; } - + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse build() { org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse result = buildPartial(); if (!result.isInitialized()) { @@ -722,17 +885,7 @@ public final class ColumnAggregationProtos { } return result; } - - private org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse buildPartial() { org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse(this); int from_bitField0_ = bitField0_; @@ -745,7 +898,7 @@ 
public final class ColumnAggregationProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse) { return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse)other); @@ -754,7 +907,7 @@ public final class ColumnAggregationProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse other) { if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse.getDefaultInstance()) return this; if (other.hasSum()) { @@ -763,7 +916,7 @@ public final class ColumnAggregationProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasSum()) { @@ -771,85 +924,88 @@ public final class ColumnAggregationProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - sum_ = input.readInt64(); - break; - } + org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required int64 sum = 1; private long sum_ ; + /** + * required int64 sum = 1; + */ public boolean hasSum() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required int64 sum = 1; + */ public long getSum() { return sum_; } + /** + * required int64 sum = 1; + */ public Builder setSum(long value) { bitField0_ |= 0x00000001; sum_ = value; onChanged(); return this; } + /** + * required int64 sum = 1; + */ public Builder clearSum() { bitField0_ = (bitField0_ & ~0x00000001); sum_ = 0L; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:SumResponse) } - + static { defaultInstance = new SumResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:SumResponse) } - + + /** + * Protobuf service {@code ColumnAggregationService} + */ public static abstract class ColumnAggregationService implements com.google.protobuf.Service { protected ColumnAggregationService() {} - + public interface Interface { + /** + * rpc sum(.SumRequest) returns (.SumResponse); + */ public abstract void sum( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest request, com.google.protobuf.RpcCallback done); - + } - + public static 
com.google.protobuf.Service newReflectiveService( final Interface impl) { return new ColumnAggregationService() { @@ -860,10 +1016,10 @@ public final class ColumnAggregationProtos { com.google.protobuf.RpcCallback done) { impl.sum(controller, request, done); } - + }; } - + public static com.google.protobuf.BlockingService newReflectiveBlockingService(final BlockingInterface impl) { return new com.google.protobuf.BlockingService() { @@ -871,7 +1027,7 @@ public final class ColumnAggregationProtos { getDescriptorForType() { return getDescriptor(); } - + public final com.google.protobuf.Message callBlockingMethod( com.google.protobuf.Descriptors.MethodDescriptor method, com.google.protobuf.RpcController controller, @@ -889,7 +1045,7 @@ public final class ColumnAggregationProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getRequestPrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -905,7 +1061,7 @@ public final class ColumnAggregationProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getResponsePrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -921,15 +1077,18 @@ public final class ColumnAggregationProtos { throw new java.lang.AssertionError("Can't get here."); } } - + }; } - + + /** + * rpc sum(.SumRequest) returns (.SumResponse); + */ public abstract void sum( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest request, com.google.protobuf.RpcCallback done); - + public static final com.google.protobuf.Descriptors.ServiceDescriptor getDescriptor() { @@ -939,7 +1098,7 @@ public final class ColumnAggregationProtos { getDescriptorForType() { return getDescriptor(); } - + public final void callMethod( com.google.protobuf.Descriptors.MethodDescriptor method, com.google.protobuf.RpcController controller, @@ -961,7 +1120,7 @@ public final class ColumnAggregationProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getRequestPrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -977,7 +1136,7 @@ public final class ColumnAggregationProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getResponsePrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -993,23 +1152,23 @@ public final class ColumnAggregationProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public static Stub newStub( com.google.protobuf.RpcChannel channel) { return new Stub(channel); } - + public static final class Stub extends org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.ColumnAggregationService implements Interface { private Stub(com.google.protobuf.RpcChannel channel) { this.channel = channel; } - + private final com.google.protobuf.RpcChannel channel; - + public com.google.protobuf.RpcChannel getChannel() { return channel; } - + public void sum( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest request, @@ -1025,26 +1184,26 @@ public final class ColumnAggregationProtos { org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse.getDefaultInstance())); } } - + public static BlockingInterface newBlockingStub( com.google.protobuf.BlockingRpcChannel channel) { 
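For context on how this generated service is consumed: an endpoint implementation extends ColumnAggregationService and overrides the single abstract sum method, while the generated callMethod and stub plumbing handles dispatch. A hypothetical implementation sketch (the HBase coprocessor registration around it is elided, and the zero sum is a placeholder):

    import com.google.protobuf.RpcCallback;
    import com.google.protobuf.RpcController;
    import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.ColumnAggregationService;
    import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest;
    import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse;

    class SumEndpointSketch extends ColumnAggregationService {
      @Override
      public void sum(RpcController controller, SumRequest request,
          RpcCallback<SumResponse> done) {
        // A real endpoint would scan request.getFamily()/getQualifier()
        // and accumulate cell values; 0 is a stand-in.
        done.run(SumResponse.newBuilder().setSum(0L).build());
      }
    }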
return new BlockingStub(channel); } - + public interface BlockingInterface { public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse sum( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest request) throws com.google.protobuf.ServiceException; } - + private static final class BlockingStub implements BlockingInterface { private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { this.channel = channel; } - + private final com.google.protobuf.BlockingRpcChannel channel; - + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse sum( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest request) @@ -1055,10 +1214,12 @@ public final class ColumnAggregationProtos { request, org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse.getDefaultInstance()); } - + } + + // @@protoc_insertion_point(class_scope:ColumnAggregationService) } - + private static com.google.protobuf.Descriptors.Descriptor internal_static_SumRequest_descriptor; private static @@ -1069,7 +1230,7 @@ public final class ColumnAggregationProtos { private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_SumResponse_fieldAccessorTable; - + public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; @@ -1096,17 +1257,13 @@ public final class ColumnAggregationProtos { internal_static_SumRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_SumRequest_descriptor, - new java.lang.String[] { "Family", "Qualifier", }, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest.class, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest.Builder.class); + new java.lang.String[] { "Family", "Qualifier", }); internal_static_SumResponse_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_SumResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_SumResponse_descriptor, - new java.lang.String[] { "Sum", }, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse.class, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse.Builder.class); + new java.lang.String[] { "Sum", }); return null; } }; @@ -1115,6 +1272,6 @@ public final class ColumnAggregationProtos { new com.google.protobuf.Descriptors.FileDescriptor[] { }, assigner); } - + // @@protoc_insertion_point(outer_class_scope) } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/IncrementCounterProcessorTestProtos.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/IncrementCounterProcessorTestProtos.java index e46cdc1..7ba5b8e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/IncrementCounterProcessorTestProtos.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/IncrementCounterProcessorTestProtos.java @@ -10,64 +10,160 @@ public final class IncrementCounterProcessorTestProtos { } public interface IncCounterProcessorRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required bytes row = 1; 
+ /** + * required bytes row = 1; + */ boolean hasRow(); + /** + * required bytes row = 1; + */ com.google.protobuf.ByteString getRow(); - + // required int32 counter = 2; + /** + * required int32 counter = 2; + */ boolean hasCounter(); + /** + * required int32 counter = 2; + */ int getCounter(); } + /** + * Protobuf type {@code IncCounterProcessorRequest} + */ public static final class IncCounterProcessorRequest extends com.google.protobuf.GeneratedMessage implements IncCounterProcessorRequestOrBuilder { // Use IncCounterProcessorRequest.newBuilder() to construct. - private IncCounterProcessorRequest(Builder builder) { + private IncCounterProcessorRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private IncCounterProcessorRequest(boolean noInit) {} - + private IncCounterProcessorRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final IncCounterProcessorRequest defaultInstance; public static IncCounterProcessorRequest getDefaultInstance() { return defaultInstance; } - + public IncCounterProcessorRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private IncCounterProcessorRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + row_ = input.readBytes(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + counter_ = input.readInt32(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_IncCounterProcessorRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_IncCounterProcessorRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_IncCounterProcessorRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest.Builder.class); + } + + 
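IncCounterProcessorRequest gets the same explicit unknownFields capture: any tag the switch does not recognize is routed through parseUnknownField and survives a reserialize. A round-trip sketch under that assumption, inventing field number 99, which this schema does not define:

    import com.google.protobuf.ByteString;
    import com.google.protobuf.UnknownFieldSet;
    import org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest;

    public class UnknownFieldRoundTripSketch {
      public static void main(String[] args) throws Exception {
        UnknownFieldSet extra = UnknownFieldSet.newBuilder()
            .addField(99, UnknownFieldSet.Field.newBuilder().addVarint(7L).build())
            .build();
        byte[] wire = IncCounterProcessorRequest.newBuilder()
            .setRow(ByteString.copyFromUtf8("r1"))
            .setCounter(1)
            .setUnknownFields(extra)
            .build().toByteArray();
        IncCounterProcessorRequest reparsed = IncCounterProcessorRequest.parseFrom(wire);
        // The default arm of the tag switch stashed field 99 rather than dropping it.
        System.out.println(reparsed.getUnknownFields().hasField(99)); // true
      }
    }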
public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public IncCounterProcessorRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new IncCounterProcessorRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required bytes row = 1; public static final int ROW_FIELD_NUMBER = 1; private com.google.protobuf.ByteString row_; + /** + * required bytes row = 1; + */ public boolean hasRow() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes row = 1; + */ public com.google.protobuf.ByteString getRow() { return row_; } - + // required int32 counter = 2; public static final int COUNTER_FIELD_NUMBER = 2; private int counter_; + /** + * required int32 counter = 2; + */ public boolean hasCounter() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required int32 counter = 2; + */ public int getCounter() { return counter_; } - + private void initFields() { row_ = com.google.protobuf.ByteString.EMPTY; counter_ = 0; @@ -76,7 +172,7 @@ public final class IncrementCounterProcessorTestProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasRow()) { memoizedIsInitialized = 0; return false; @@ -88,7 +184,7 @@ public final class IncrementCounterProcessorTestProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -100,12 +196,12 @@ public final class IncrementCounterProcessorTestProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -119,14 +215,14 @@ public final class IncrementCounterProcessorTestProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -136,7 +232,7 @@ public final class IncrementCounterProcessorTestProtos { return super.equals(obj); } org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest) obj; - + boolean result = true; result = result && (hasRow() == other.hasRow()); if (hasRow()) { @@ -152,9 +248,13 @@ public final class IncrementCounterProcessorTestProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasRow()) { @@ -166,89 +266,79 @@ public final class IncrementCounterProcessorTestProtos { hash = (53 * hash) + getCounter(); } hash = (29 * hash) + 
getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest parseFrom( 
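One behavioral detail worth confirming in review: the old parseDelimitedFrom returned null when mergeDelimitedFrom hit end-of-stream, and PARSER.parseDelimitedFrom keeps that null-at-clean-EOF contract, so streaming callers need no changes. A small sketch, assuming the generated class is available:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest;

    public class DelimitedStreamSketch {
      public static void main(String[] args) throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        IncCounterProcessorRequest.newBuilder()
            .setRow(ByteString.copyFromUtf8("r1"))
            .setCounter(1)
            .build().writeDelimitedTo(out); // length-prefixed frame
        InputStream in = new ByteArrayInputStream(out.toByteArray());
        IncCounterProcessorRequest first = IncCounterProcessorRequest.parseDelimitedFrom(in);
        IncCounterProcessorRequest second = IncCounterProcessorRequest.parseDelimitedFrom(in);
        System.out.println(first.getCounter()); // 1
        System.out.println(second == null);     // true: clean end of stream
      }
    }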
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code IncCounterProcessorRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequestOrBuilder { @@ -256,18 +346,21 @@ public final class IncrementCounterProcessorTestProtos { getDescriptor() { return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_IncCounterProcessorRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_IncCounterProcessorRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_IncCounterProcessorRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -278,7 +371,7 @@ public final class IncrementCounterProcessorTestProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); row_ = com.google.protobuf.ByteString.EMPTY; @@ -287,20 +380,20 @@ public final class IncrementCounterProcessorTestProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest.getDescriptor(); + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_IncCounterProcessorRequest_descriptor; } - + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest getDefaultInstanceForType() { return 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest build() { org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest result = buildPartial(); if (!result.isInitialized()) { @@ -308,17 +401,7 @@ public final class IncrementCounterProcessorTestProtos { } return result; } - - private org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest buildPartial() { org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest(this); int from_bitField0_ = bitField0_; @@ -335,7 +418,7 @@ public final class IncrementCounterProcessorTestProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest) { return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest)other); @@ -344,7 +427,7 @@ public final class IncrementCounterProcessorTestProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest other) { if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest.getDefaultInstance()) return this; if (other.hasRow()) { @@ -356,7 +439,7 @@ public final class IncrementCounterProcessorTestProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasRow()) { @@ -368,54 +451,43 @@ public final class IncrementCounterProcessorTestProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - row_ = input.readBytes(); - break; - } - case 16: { - bitField0_ |= 0x00000002; - counter_ = input.readInt32(); - break; - } + 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required bytes row = 1; private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes row = 1; + */ public boolean hasRow() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes row = 1; + */ public com.google.protobuf.ByteString getRow() { return row_; } + /** + * required bytes row = 1; + */ public Builder setRow(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -425,91 +497,185 @@ public final class IncrementCounterProcessorTestProtos { onChanged(); return this; } + /** + * required bytes row = 1; + */ public Builder clearRow() { bitField0_ = (bitField0_ & ~0x00000001); row_ = getDefaultInstance().getRow(); onChanged(); return this; } - + // required int32 counter = 2; private int counter_ ; + /** + * required int32 counter = 2; + */ public boolean hasCounter() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required int32 counter = 2; + */ public int getCounter() { return counter_; } + /** + * required int32 counter = 2; + */ public Builder setCounter(int value) { bitField0_ |= 0x00000002; counter_ = value; onChanged(); return this; } + /** + * required int32 counter = 2; + */ public Builder clearCounter() { bitField0_ = (bitField0_ & ~0x00000002); counter_ = 0; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:IncCounterProcessorRequest) } - + static { defaultInstance = new IncCounterProcessorRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:IncCounterProcessorRequest) } - + public interface IncCounterProcessorResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required int32 response = 1; + /** + * required int32 response = 1; + */ boolean hasResponse(); + /** + * required int32 response = 1; + */ int getResponse(); } + /** + * Protobuf type {@code IncCounterProcessorResponse} + */ public static final class IncCounterProcessorResponse extends com.google.protobuf.GeneratedMessage implements IncCounterProcessorResponseOrBuilder { // Use IncCounterProcessorResponse.newBuilder() to construct. 
- private IncCounterProcessorResponse(Builder builder) { + private IncCounterProcessorResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private IncCounterProcessorResponse(boolean noInit) {} - + private IncCounterProcessorResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final IncCounterProcessorResponse defaultInstance; public static IncCounterProcessorResponse getDefaultInstance() { return defaultInstance; } - + public IncCounterProcessorResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private IncCounterProcessorResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + response_ = input.readInt32(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_IncCounterProcessorResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_IncCounterProcessorResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_IncCounterProcessorResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public IncCounterProcessorResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new IncCounterProcessorResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required int32 response = 1; public static final int RESPONSE_FIELD_NUMBER = 1; private int response_; + /** + * 
required int32 response = 1; + */ public boolean hasResponse() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required int32 response = 1; + */ public int getResponse() { return response_; } - + private void initFields() { response_ = 0; } @@ -517,7 +683,7 @@ public final class IncrementCounterProcessorTestProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasResponse()) { memoizedIsInitialized = 0; return false; @@ -525,7 +691,7 @@ public final class IncrementCounterProcessorTestProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -534,12 +700,12 @@ public final class IncrementCounterProcessorTestProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -549,14 +715,14 @@ public final class IncrementCounterProcessorTestProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -566,7 +732,7 @@ public final class IncrementCounterProcessorTestProtos { return super.equals(obj); } org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse) obj; - + boolean result = true; result = result && (hasResponse() == other.hasResponse()); if (hasResponse()) { @@ -577,9 +743,13 @@ public final class IncrementCounterProcessorTestProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasResponse()) { @@ -587,89 +757,79 @@ public final class IncrementCounterProcessorTestProtos { hash = (53 * hash) + getResponse(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse parseFrom(byte[] data) throws 
com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code IncCounterProcessorResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder 
implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponseOrBuilder { @@ -677,18 +837,21 @@ public final class IncrementCounterProcessorTestProtos { getDescriptor() { return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_IncCounterProcessorResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_IncCounterProcessorResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_IncCounterProcessorResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -699,27 +862,27 @@ public final class IncrementCounterProcessorTestProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); response_ = 0; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse.getDescriptor(); + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_IncCounterProcessorResponse_descriptor; } - + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse build() { org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse result = buildPartial(); if (!result.isInitialized()) { @@ -727,17 +890,7 @@ public final class IncrementCounterProcessorTestProtos { } return result; } - - private org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse buildPartial() { org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse(this); int from_bitField0_ = bitField0_; @@ -750,7 +903,7 @@ public final class IncrementCounterProcessorTestProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse) { return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse)other); @@ -759,7 +912,7 @@ public final class IncrementCounterProcessorTestProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse other) { if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse.getDefaultInstance()) return this; if (other.hasResponse()) { @@ -768,7 +921,7 @@ public final class IncrementCounterProcessorTestProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasResponse()) { @@ -776,152 +929,287 @@ public final class IncrementCounterProcessorTestProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - response_ = input.readInt32(); - break; - } + org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required int32 response = 1; private int response_ ; + /** + * required int32 response = 1; + */ public boolean hasResponse() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required int32 response = 1; + */ public int getResponse() { return response_; } + /** + * required int32 response = 1; + */ public Builder setResponse(int value) { bitField0_ |= 0x00000001; response_ = value; onChanged(); return this; } + /** + * required int32 response = 1; + */ public Builder clearResponse() { bitField0_ = (bitField0_ & ~0x00000001); response_ = 0; onChanged(); return this; } - + // 
@@protoc_insertion_point(builder_scope:IncCounterProcessorResponse) } - + static { defaultInstance = new IncCounterProcessorResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:IncCounterProcessorResponse) } - + public interface FriendsOfFriendsProcessorRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required bytes person = 1; + /** + * required bytes person = 1; + */ boolean hasPerson(); + /** + * required bytes person = 1; + */ com.google.protobuf.ByteString getPerson(); - + // required bytes row = 2; + /** + * required bytes row = 2; + */ boolean hasRow(); + /** + * required bytes row = 2; + */ com.google.protobuf.ByteString getRow(); - + // repeated string result = 3; - java.util.List getResultList(); + /** + * repeated string result = 3; + */ + java.util.List + getResultList(); + /** + * repeated string result = 3; + */ int getResultCount(); - String getResult(int index); + /** + * repeated string result = 3; + */ + java.lang.String getResult(int index); + /** + * repeated string result = 3; + */ + com.google.protobuf.ByteString + getResultBytes(int index); } + /** + * Protobuf type {@code FriendsOfFriendsProcessorRequest} + */ public static final class FriendsOfFriendsProcessorRequest extends com.google.protobuf.GeneratedMessage implements FriendsOfFriendsProcessorRequestOrBuilder { // Use FriendsOfFriendsProcessorRequest.newBuilder() to construct. - private FriendsOfFriendsProcessorRequest(Builder builder) { + private FriendsOfFriendsProcessorRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private FriendsOfFriendsProcessorRequest(boolean noInit) {} - + private FriendsOfFriendsProcessorRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final FriendsOfFriendsProcessorRequest defaultInstance; public static FriendsOfFriendsProcessorRequest getDefaultInstance() { return defaultInstance; } - + public FriendsOfFriendsProcessorRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private FriendsOfFriendsProcessorRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + person_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + row_ = input.readBytes(); + break; + } + case 26: { + if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { + result_ = new com.google.protobuf.LazyStringArrayList(); + mutable_bitField0_ |= 0x00000004; + } + result_.add(input.readBytes()); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new 
com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { + result_ = new com.google.protobuf.UnmodifiableLazyStringList(result_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_FriendsOfFriendsProcessorRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_FriendsOfFriendsProcessorRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_FriendsOfFriendsProcessorRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest.Builder.class); + } + + public static com.google.protobuf.Parser<FriendsOfFriendsProcessorRequest> PARSER = + new com.google.protobuf.AbstractParser<FriendsOfFriendsProcessorRequest>() { + public FriendsOfFriendsProcessorRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new FriendsOfFriendsProcessorRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser<FriendsOfFriendsProcessorRequest> getParserForType() { + return PARSER; } - + private int bitField0_; // required bytes person = 1; public static final int PERSON_FIELD_NUMBER = 1; private com.google.protobuf.ByteString person_; + /** + * required bytes person = 1; + */ public boolean hasPerson() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes person = 1; + */ public com.google.protobuf.ByteString getPerson() { return person_; } - + // required bytes row = 2; public static final int ROW_FIELD_NUMBER = 2; private com.google.protobuf.ByteString row_; + /** + * required bytes row = 2; + */ public boolean hasRow() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required bytes row = 2; + */ public com.google.protobuf.ByteString getRow() { return row_; } - + // repeated string result = 3; public static final int RESULT_FIELD_NUMBER = 3; private com.google.protobuf.LazyStringList result_; - public java.util.List<String> + /** + * repeated string result = 3; + */ + public java.util.List<java.lang.String> getResultList() { return result_; } + /** + * repeated string result = 3; + */ public int getResultCount() { return result_.size(); } - public String getResult(int index) { + /** + * repeated string result = 3; + */ + public java.lang.String getResult(int index) { return result_.get(index); } - + /** + * repeated string result = 3; + */ + public com.google.protobuf.ByteString + getResultBytes(int index) { + return result_.getByteString(index); + } + private void initFields() { person_ = com.google.protobuf.ByteString.EMPTY; row_ = com.google.protobuf.ByteString.EMPTY; @@ -931,7 +1219,7 @@ public final class IncrementCounterProcessorTestProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized !=
-1) return isInitialized == 1; - + if (!hasPerson()) { memoizedIsInitialized = 0; return false; @@ -943,7 +1231,7 @@ public final class IncrementCounterProcessorTestProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -958,12 +1246,12 @@ public final class IncrementCounterProcessorTestProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -986,14 +1274,14 @@ public final class IncrementCounterProcessorTestProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -1003,7 +1291,7 @@ public final class IncrementCounterProcessorTestProtos { return super.equals(obj); } org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest) obj; - + boolean result = true; result = result && (hasPerson() == other.hasPerson()); if (hasPerson()) { @@ -1021,9 +1309,13 @@ public final class IncrementCounterProcessorTestProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasPerson()) { @@ -1039,89 +1331,79 @@ public final class IncrementCounterProcessorTestProtos { hash = (53 * hash) + getResultList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code FriendsOfFriendsProcessorRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequestOrBuilder { @@ -1129,18 +1411,21 @@ public final class IncrementCounterProcessorTestProtos { getDescriptor() { return 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_FriendsOfFriendsProcessorRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_FriendsOfFriendsProcessorRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_FriendsOfFriendsProcessorRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -1151,7 +1436,7 @@ public final class IncrementCounterProcessorTestProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); person_ = com.google.protobuf.ByteString.EMPTY; @@ -1162,38 +1447,28 @@ public final class IncrementCounterProcessorTestProtos { bitField0_ = (bitField0_ & ~0x00000004); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest.getDescriptor(); + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_FriendsOfFriendsProcessorRequest_descriptor; } - + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest.getDefaultInstance(); } - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest build() { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { + + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest build() { org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest result = buildPartial(); if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); + throw newUninitializedMessageException(result); } return result; } - 
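Review note: the buildParsed() helper removed above was the old bridge between merge-style parsing and the checked InvalidProtocolBufferException; with PARSER that initialization check happens inside parsing itself, leaving the builder with exactly two construction paths. A minimal sketch of the difference, assuming the standard com.google.protobuf imports and an illustrative person value:

    IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest.Builder builder =
        IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest.newBuilder()
            .setPerson(ByteString.copyFromUtf8("alice")); // required row left unset

    // buildPartial() never throws, even with a required field missing:
    IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest partial =
        builder.buildPartial();
    assert !partial.isInitialized();

    // build() still enforces required fields, but with the unchecked
    // UninitializedMessageException rather than InvalidProtocolBufferException:
    try {
      builder.build();
    } catch (UninitializedMessageException e) {
      // expected: required field "row" is not set
    }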
+ public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest buildPartial() { org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest(this); int from_bitField0_ = bitField0_; @@ -1216,7 +1491,7 @@ public final class IncrementCounterProcessorTestProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest) { return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest)other); @@ -1225,7 +1500,7 @@ public final class IncrementCounterProcessorTestProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest other) { if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest.getDefaultInstance()) return this; if (other.hasPerson()) { @@ -1247,7 +1522,7 @@ public final class IncrementCounterProcessorTestProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasPerson()) { @@ -1259,59 +1534,43 @@ public final class IncrementCounterProcessorTestProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - person_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - row_ = input.readBytes(); - break; - } - case 26: { - ensureResultIsMutable(); - result_.add(input.readBytes()); - break; - } + org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required bytes person = 1; private com.google.protobuf.ByteString person_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes person = 1; + */ public boolean hasPerson() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes person = 1; + */ public com.google.protobuf.ByteString getPerson() { return person_; } + /** + * required bytes 
person = 1; + */ public Builder setPerson(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -1321,21 +1580,33 @@ public final class IncrementCounterProcessorTestProtos { onChanged(); return this; } + /** + * required bytes person = 1; + */ public Builder clearPerson() { bitField0_ = (bitField0_ & ~0x00000001); person_ = getDefaultInstance().getPerson(); onChanged(); return this; } - + // required bytes row = 2; private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes row = 2; + */ public boolean hasRow() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required bytes row = 2; + */ public com.google.protobuf.ByteString getRow() { return row_; } + /** + * required bytes row = 2; + */ public Builder setRow(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -1345,13 +1616,16 @@ public final class IncrementCounterProcessorTestProtos { onChanged(); return this; } + /** + * required bytes row = 2; + */ public Builder clearRow() { bitField0_ = (bitField0_ & ~0x00000002); row_ = getDefaultInstance().getRow(); onChanged(); return this; } - + // repeated string result = 3; private com.google.protobuf.LazyStringList result_ = com.google.protobuf.LazyStringArrayList.EMPTY; private void ensureResultIsMutable() { @@ -1360,18 +1634,37 @@ public final class IncrementCounterProcessorTestProtos { bitField0_ |= 0x00000004; } } - public java.util.List + /** + * repeated string result = 3; + */ + public java.util.List getResultList() { return java.util.Collections.unmodifiableList(result_); } + /** + * repeated string result = 3; + */ public int getResultCount() { return result_.size(); } - public String getResult(int index) { + /** + * repeated string result = 3; + */ + public java.lang.String getResult(int index) { return result_.get(index); } + /** + * repeated string result = 3; + */ + public com.google.protobuf.ByteString + getResultBytes(int index) { + return result_.getByteString(index); + } + /** + * repeated string result = 3; + */ public Builder setResult( - int index, String value) { + int index, java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ -1380,7 +1673,11 @@ public final class IncrementCounterProcessorTestProtos { onChanged(); return this; } - public Builder addResult(String value) { + /** + * repeated string result = 3; + */ + public Builder addResult( + java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ -1389,86 +1686,204 @@ public final class IncrementCounterProcessorTestProtos { onChanged(); return this; } + /** + * repeated string result = 3; + */ public Builder addAllResult( - java.lang.Iterable values) { + java.lang.Iterable values) { ensureResultIsMutable(); super.addAll(values, result_); onChanged(); return this; } + /** + * repeated string result = 3; + */ public Builder clearResult() { result_ = com.google.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } - void addResult(com.google.protobuf.ByteString value) { - ensureResultIsMutable(); + /** + * repeated string result = 3; + */ + public Builder addResultBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureResultIsMutable(); result_.add(value); onChanged(); + return this; } - + // @@protoc_insertion_point(builder_scope:FriendsOfFriendsProcessorRequest) } - + static { defaultInstance 
= new FriendsOfFriendsProcessorRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:FriendsOfFriendsProcessorRequest) } - + public interface FriendsOfFriendsProcessorResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // repeated string result = 1; - java.util.List getResultList(); + /** + * repeated string result = 1; + */ + java.util.List + getResultList(); + /** + * repeated string result = 1; + */ int getResultCount(); - String getResult(int index); + /** + * repeated string result = 1; + */ + java.lang.String getResult(int index); + /** + * repeated string result = 1; + */ + com.google.protobuf.ByteString + getResultBytes(int index); } + /** + * Protobuf type {@code FriendsOfFriendsProcessorResponse} + */ public static final class FriendsOfFriendsProcessorResponse extends com.google.protobuf.GeneratedMessage implements FriendsOfFriendsProcessorResponseOrBuilder { // Use FriendsOfFriendsProcessorResponse.newBuilder() to construct. - private FriendsOfFriendsProcessorResponse(Builder builder) { + private FriendsOfFriendsProcessorResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private FriendsOfFriendsProcessorResponse(boolean noInit) {} - + private FriendsOfFriendsProcessorResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final FriendsOfFriendsProcessorResponse defaultInstance; public static FriendsOfFriendsProcessorResponse getDefaultInstance() { return defaultInstance; } - + public FriendsOfFriendsProcessorResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private FriendsOfFriendsProcessorResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + result_ = new com.google.protobuf.LazyStringArrayList(); + mutable_bitField0_ |= 0x00000001; + } + result_.add(input.readBytes()); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + result_ = new com.google.protobuf.UnmodifiableLazyStringList(result_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_FriendsOfFriendsProcessorResponse_descriptor; } - + protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_FriendsOfFriendsProcessorResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_FriendsOfFriendsProcessorResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse.Builder.class); + } + + public static com.google.protobuf.Parser<FriendsOfFriendsProcessorResponse> PARSER = + new com.google.protobuf.AbstractParser<FriendsOfFriendsProcessorResponse>() { + public FriendsOfFriendsProcessorResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new FriendsOfFriendsProcessorResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser<FriendsOfFriendsProcessorResponse> getParserForType() { + return PARSER; } - + // repeated string result = 1; public static final int RESULT_FIELD_NUMBER = 1; private com.google.protobuf.LazyStringList result_; - public java.util.List<String> + /** + * repeated string result = 1; + */ + public java.util.List<java.lang.String> getResultList() { return result_; } + /** + * repeated string result = 1; + */ public int getResultCount() { return result_.size(); } - public String getResult(int index) { + /** + * repeated string result = 1; + */ + public java.lang.String getResult(int index) { return result_.get(index); } - + /** + * repeated string result = 1; + */ + public com.google.protobuf.ByteString + getResultBytes(int index) { + return result_.getByteString(index); + } + private void initFields() { result_ = com.google.protobuf.LazyStringArrayList.EMPTY; } @@ -1476,11 +1891,11 @@ public final class IncrementCounterProcessorTestProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -1489,12 +1904,12 @@ public final class IncrementCounterProcessorTestProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; { int dataSize = 0; @@ -1509,14 +1924,14 @@ public final class IncrementCounterProcessorTestProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -1526,7 +1941,7 @@ public final class IncrementCounterProcessorTestProtos { return super.equals(obj); } org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse) obj; - + boolean result = true; result = result && getResultList()
.equals(other.getResultList()); @@ -1534,9 +1949,13 @@ public final class IncrementCounterProcessorTestProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (getResultCount() > 0) { @@ -1544,89 +1963,79 @@ public final class IncrementCounterProcessorTestProtos { hash = (53 * hash) + getResultList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - 
if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code FriendsOfFriendsProcessorResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponseOrBuilder { @@ -1634,18 +2043,21 @@ public final class IncrementCounterProcessorTestProtos { getDescriptor() { return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_FriendsOfFriendsProcessorResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_FriendsOfFriendsProcessorResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_FriendsOfFriendsProcessorResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -1656,27 +2068,27 @@ public final class IncrementCounterProcessorTestProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); result_ = com.google.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return 
create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse.getDescriptor(); + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_FriendsOfFriendsProcessorResponse_descriptor; } - + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse build() { org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse result = buildPartial(); if (!result.isInitialized()) { @@ -1684,17 +2096,7 @@ public final class IncrementCounterProcessorTestProtos { } return result; } - - private org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse buildPartial() { org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse(this); int from_bitField0_ = bitField0_; @@ -1707,7 +2109,7 @@ public final class IncrementCounterProcessorTestProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse) { return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse)other); @@ -1716,7 +2118,7 @@ public final class IncrementCounterProcessorTestProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse other) { if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse.getDefaultInstance()) return this; if (!other.result_.isEmpty()) { @@ -1732,45 +2134,30 @@ public final class IncrementCounterProcessorTestProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - 
com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - ensureResultIsMutable(); - result_.add(input.readBytes()); - break; - } + org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // repeated string result = 1; private com.google.protobuf.LazyStringList result_ = com.google.protobuf.LazyStringArrayList.EMPTY; private void ensureResultIsMutable() { @@ -1779,18 +2166,37 @@ public final class IncrementCounterProcessorTestProtos { bitField0_ |= 0x00000001; } } - public java.util.List + /** + * repeated string result = 1; + */ + public java.util.List getResultList() { return java.util.Collections.unmodifiableList(result_); } + /** + * repeated string result = 1; + */ public int getResultCount() { return result_.size(); } - public String getResult(int index) { + /** + * repeated string result = 1; + */ + public java.lang.String getResult(int index) { return result_.get(index); } + /** + * repeated string result = 1; + */ + public com.google.protobuf.ByteString + getResultBytes(int index) { + return result_.getByteString(index); + } + /** + * repeated string result = 1; + */ public Builder setResult( - int index, String value) { + int index, java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ -1799,7 +2205,11 @@ public final class IncrementCounterProcessorTestProtos { onChanged(); return this; } - public Builder addResult(String value) { + /** + * repeated string result = 1; + */ + public Builder addResult( + java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ -1808,96 +2218,206 @@ public final class IncrementCounterProcessorTestProtos { onChanged(); return this; } + /** + * repeated string result = 1; + */ public Builder addAllResult( - java.lang.Iterable values) { + java.lang.Iterable values) { ensureResultIsMutable(); super.addAll(values, result_); onChanged(); return this; } + /** + * repeated string result = 1; + */ public Builder clearResult() { result_ = com.google.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } - void addResult(com.google.protobuf.ByteString value) { - ensureResultIsMutable(); + /** + * repeated string result = 1; + */ + public Builder addResultBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureResultIsMutable(); result_.add(value); onChanged(); + return this; } - + // @@protoc_insertion_point(builder_scope:FriendsOfFriendsProcessorResponse) } - + static { defaultInstance = new FriendsOfFriendsProcessorResponse(true); defaultInstance.initFields(); } - + // 
@@protoc_insertion_point(class_scope:FriendsOfFriendsProcessorResponse)
   }
-
+
   public interface RowSwapProcessorRequestOrBuilder
       extends com.google.protobuf.MessageOrBuilder {
-
+
     // required bytes row1 = 1;
+    /**
+     * <code>required bytes row1 = 1;</code>
+     */
     boolean hasRow1();
+    /**
+     * <code>required bytes row1 = 1;</code>
+     */
     com.google.protobuf.ByteString getRow1();
-
+
     // required bytes row2 = 2;
+    /**
+     * <code>required bytes row2 = 2;</code>
+     */
     boolean hasRow2();
+    /**
+     * <code>required bytes row2 = 2;</code>
+     */
     com.google.protobuf.ByteString getRow2();
   }
+  /**
+   * Protobuf type {@code RowSwapProcessorRequest}
+   */
   public static final class RowSwapProcessorRequest extends
       com.google.protobuf.GeneratedMessage
       implements RowSwapProcessorRequestOrBuilder {
     // Use RowSwapProcessorRequest.newBuilder() to construct.
-    private RowSwapProcessorRequest(Builder builder) {
+    private RowSwapProcessorRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
       super(builder);
+      this.unknownFields = builder.getUnknownFields();
     }
-    private RowSwapProcessorRequest(boolean noInit) {}
-
+    private RowSwapProcessorRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
     private static final RowSwapProcessorRequest defaultInstance;
     public static RowSwapProcessorRequest getDefaultInstance() {
       return defaultInstance;
     }
-
+
     public RowSwapProcessorRequest getDefaultInstanceForType() {
       return defaultInstance;
     }
-
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private RowSwapProcessorRequest(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 10: {
+              bitField0_ |= 0x00000001;
+              row1_ = input.readBytes();
+              break;
+            }
+            case 18: {
+              bitField0_ |= 0x00000002;
+              row2_ = input.readBytes();
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
       return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_RowSwapProcessorRequest_descriptor;
     }
-
+
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_RowSwapProcessorRequest_fieldAccessorTable;
+      return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_RowSwapProcessorRequest_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<RowSwapProcessorRequest> PARSER =
+        new com.google.protobuf.AbstractParser<RowSwapProcessorRequest>() {
+      public RowSwapProcessorRequest parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new RowSwapProcessorRequest(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<RowSwapProcessorRequest> getParserForType() {
+      return PARSER;
     }
-
+
     private int bitField0_;
     // required bytes row1 = 1;
     public static final int ROW1_FIELD_NUMBER = 1;
     private com.google.protobuf.ByteString row1_;
+    /**
+     * <code>required bytes row1 = 1;</code>
+     */
     public boolean hasRow1() {
       return ((bitField0_ & 0x00000001) == 0x00000001);
     }
+    /**
+     * <code>required bytes row1 = 1;</code>
+     */
     public com.google.protobuf.ByteString getRow1() {
       return row1_;
     }
-
+
     // required bytes row2 = 2;
     public static final int ROW2_FIELD_NUMBER = 2;
     private com.google.protobuf.ByteString row2_;
+    /**
+     * <code>required bytes row2 = 2;</code>
+     */
     public boolean hasRow2() {
       return ((bitField0_ & 0x00000002) == 0x00000002);
     }
+    /**
+     * <code>required bytes row2 = 2;</code>
+     */
     public com.google.protobuf.ByteString getRow2() {
       return row2_;
     }
-
+
     private void initFields() {
       row1_ = com.google.protobuf.ByteString.EMPTY;
       row2_ = com.google.protobuf.ByteString.EMPTY;
@@ -1906,7 +2426,7 @@ public final class IncrementCounterProcessorTestProtos {
     public final boolean isInitialized() {
       byte isInitialized = memoizedIsInitialized;
       if (isInitialized != -1) return isInitialized == 1;
-
+
       if (!hasRow1()) {
         memoizedIsInitialized = 0;
         return false;
@@ -1918,7 +2438,7 @@ public final class IncrementCounterProcessorTestProtos {
       memoizedIsInitialized = 1;
       return true;
     }
-
+
     public void writeTo(com.google.protobuf.CodedOutputStream output)
                         throws java.io.IOException {
       getSerializedSize();
@@ -1930,12 +2450,12 @@ public final class IncrementCounterProcessorTestProtos {
       }
       getUnknownFields().writeTo(output);
     }
-
+
     private int memoizedSerializedSize = -1;
     public int getSerializedSize() {
       int size = memoizedSerializedSize;
       if (size != -1) return size;
-
+
       size = 0;
       if (((bitField0_ & 0x00000001) == 0x00000001)) {
         size += com.google.protobuf.CodedOutputStream
@@ -1949,14 +2469,14 @@ public final class IncrementCounterProcessorTestProtos {
       memoizedSerializedSize = size;
       return size;
     }
-
+
     private static final long serialVersionUID = 0L;
     @java.lang.Override
     protected java.lang.Object writeReplace()
         throws java.io.ObjectStreamException {
       return super.writeReplace();
     }
-
+
     @java.lang.Override
     public boolean equals(final java.lang.Object obj) {
       if (obj == this) {
@@ -1966,7 +2486,7 @@ public final class IncrementCounterProcessorTestProtos {
         return super.equals(obj);
       }
       org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest) obj;
-
+
       boolean result = true;
       result = result && (hasRow1() == other.hasRow1());
       if (hasRow1()) {
@@ -1982,9 +2502,13 @@ public final class IncrementCounterProcessorTestProtos {
           getUnknownFields().equals(other.getUnknownFields());
       return result;
     }
-
+
+    private int memoizedHashCode = 0;
     @java.lang.Override
public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasRow1()) { @@ -1996,89 +2520,79 @@ public final class IncrementCounterProcessorTestProtos { hash = (53 * hash) + getRow2().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest parseFrom( 
com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code RowSwapProcessorRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequestOrBuilder { @@ -2086,18 +2600,21 @@ public final class IncrementCounterProcessorTestProtos { getDescriptor() { return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_RowSwapProcessorRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_RowSwapProcessorRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_RowSwapProcessorRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -2108,7 +2625,7 @@ public final class IncrementCounterProcessorTestProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); row1_ = com.google.protobuf.ByteString.EMPTY; @@ -2117,20 +2634,20 @@ public final class IncrementCounterProcessorTestProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest.getDescriptor(); + return 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_RowSwapProcessorRequest_descriptor; } - + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest build() { org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest result = buildPartial(); if (!result.isInitialized()) { @@ -2138,17 +2655,7 @@ public final class IncrementCounterProcessorTestProtos { } return result; } - - private org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest buildPartial() { org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest(this); int from_bitField0_ = bitField0_; @@ -2165,7 +2672,7 @@ public final class IncrementCounterProcessorTestProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest) { return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest)other); @@ -2174,7 +2681,7 @@ public final class IncrementCounterProcessorTestProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest other) { if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest.getDefaultInstance()) return this; if (other.hasRow1()) { @@ -2186,7 +2693,7 @@ public final class IncrementCounterProcessorTestProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasRow1()) { @@ -2198,54 +2705,43 @@ public final class IncrementCounterProcessorTestProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); 
- onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - row1_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - row2_ = input.readBytes(); - break; - } + org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required bytes row1 = 1; private com.google.protobuf.ByteString row1_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes row1 = 1; + */ public boolean hasRow1() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes row1 = 1; + */ public com.google.protobuf.ByteString getRow1() { return row1_; } + /** + * required bytes row1 = 1; + */ public Builder setRow1(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -2255,21 +2751,33 @@ public final class IncrementCounterProcessorTestProtos { onChanged(); return this; } + /** + * required bytes row1 = 1; + */ public Builder clearRow1() { bitField0_ = (bitField0_ & ~0x00000001); row1_ = getDefaultInstance().getRow1(); onChanged(); return this; } - + // required bytes row2 = 2; private com.google.protobuf.ByteString row2_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes row2 = 2; + */ public boolean hasRow2() { return ((bitField0_ & 0x00000002) == 0x00000002); } + /** + * required bytes row2 = 2; + */ public com.google.protobuf.ByteString getRow2() { return row2_; } + /** + * required bytes row2 = 2; + */ public Builder setRow2(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -2279,90 +2787,154 @@ public final class IncrementCounterProcessorTestProtos { onChanged(); return this; } + /** + * required bytes row2 = 2; + */ public Builder clearRow2() { bitField0_ = (bitField0_ & ~0x00000002); row2_ = getDefaultInstance().getRow2(); onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:RowSwapProcessorRequest) } - + static { defaultInstance = new RowSwapProcessorRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:RowSwapProcessorRequest) } - + public interface RowSwapProcessorResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code RowSwapProcessorResponse} + */ public static final class RowSwapProcessorResponse extends com.google.protobuf.GeneratedMessage implements RowSwapProcessorResponseOrBuilder { // Use RowSwapProcessorResponse.newBuilder() to construct. 
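
The hunks above repeat one mechanical rewrite for every static parseFrom/parseDelimitedFrom overload: the 2.4-era newBuilder().mergeFrom(...).buildParsed() chain becomes a call to the new singleton PARSER. A minimal round-trip sketch of the caller-visible result, assuming the regenerated classes are on the classpath (ParserMigrationSketch itself is hypothetical, not part of this patch):

    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest;

    public class ParserMigrationSketch {
      public static void main(String[] args) throws Exception {
        RowSwapProcessorRequest req = RowSwapProcessorRequest.newBuilder()
            .setRow1(ByteString.copyFromUtf8("row-a"))
            .setRow2(ByteString.copyFromUtf8("row-b"))
            .build();
        byte[] bytes = req.toByteArray();
        // 2.4 generated code did: newBuilder().mergeFrom(bytes).buildParsed()
        // 2.5 generated code delegates to the shared, stateless PARSER:
        RowSwapProcessorRequest parsed = RowSwapProcessorRequest.PARSER.parseFrom(bytes);
        System.out.println(parsed.getRow1().equals(req.getRow1())); // true
      }
    }

As far as I can tell the delimited variants keep their old contract as well: parseDelimitedFrom still returns null at a clean end-of-stream rather than throwing, so the removal of the mergeDelimitedFrom branch is behavior-preserving.
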
- private RowSwapProcessorResponse(Builder builder) { + private RowSwapProcessorResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private RowSwapProcessorResponse(boolean noInit) {} - + private RowSwapProcessorResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final RowSwapProcessorResponse defaultInstance; public static RowSwapProcessorResponse getDefaultInstance() { return defaultInstance; } - + public RowSwapProcessorResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private RowSwapProcessorResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_RowSwapProcessorResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_RowSwapProcessorResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_RowSwapProcessorResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public RowSwapProcessorResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RowSwapProcessorResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void 
writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -2372,101 +2944,95 @@ public final class IncrementCounterProcessorTestProtos { return super.equals(obj); } org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse) obj; - + boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return 
PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code RowSwapProcessorResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponseOrBuilder { @@ -2474,18 +3040,21 @@ public final class IncrementCounterProcessorTestProtos { getDescriptor() { return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_RowSwapProcessorResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_RowSwapProcessorResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_RowSwapProcessorResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse.Builder.class); } - + // Construct using 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -2496,25 +3065,25 @@ public final class IncrementCounterProcessorTestProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse.getDescriptor(); + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_RowSwapProcessorResponse_descriptor; } - + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse build() { org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse result = buildPartial(); if (!result.isInitialized()) { @@ -2522,23 +3091,13 @@ public final class IncrementCounterProcessorTestProtos { } return result; } - - private org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse buildPartial() { org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse(this); onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse) { return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse)other); @@ -2547,102 +3106,171 @@ public final class IncrementCounterProcessorTestProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse other) { if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } + org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - - + // @@protoc_insertion_point(builder_scope:RowSwapProcessorResponse) } - + static { defaultInstance = new RowSwapProcessorResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:RowSwapProcessorResponse) } - + public interface TimeoutProcessorRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required bytes row = 1; + /** + * required bytes row = 1; + */ boolean hasRow(); + /** + * required bytes row = 1; + */ com.google.protobuf.ByteString getRow(); } + /** + * Protobuf type {@code TimeoutProcessorRequest} + */ public static final class TimeoutProcessorRequest extends com.google.protobuf.GeneratedMessage implements TimeoutProcessorRequestOrBuilder { // Use TimeoutProcessorRequest.newBuilder() to construct. 
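
A behavioral detail of the new parse-at-construction scheme, visible in each parsing constructor above: on malformed input the partially-read message is attached to the InvalidProtocolBufferException via setUnfinishedMessage(this). Callers can recover it if they want best-effort decoding; a hedged sketch of that recovery pattern (parseLeniently is illustrative only, no call site in this patch does this):

    import com.google.protobuf.InvalidProtocolBufferException;
    import org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest;

    public class UnfinishedMessageSketch {
      static TimeoutProcessorRequest parseLeniently(byte[] bytes) {
        try {
          return TimeoutProcessorRequest.PARSER.parseFrom(bytes);
        } catch (InvalidProtocolBufferException e) {
          // Holds whatever fields were decoded before the failure; can be
          // null if the parser never attached a partial message.
          return (TimeoutProcessorRequest) e.getUnfinishedMessage();
        }
      }
    }
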
- private TimeoutProcessorRequest(Builder builder) { + private TimeoutProcessorRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private TimeoutProcessorRequest(boolean noInit) {} - + private TimeoutProcessorRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final TimeoutProcessorRequest defaultInstance; public static TimeoutProcessorRequest getDefaultInstance() { return defaultInstance; } - + public TimeoutProcessorRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private TimeoutProcessorRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + row_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_TimeoutProcessorRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_TimeoutProcessorRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_TimeoutProcessorRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public TimeoutProcessorRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new TimeoutProcessorRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required bytes row = 1; public static final int ROW_FIELD_NUMBER = 1; private com.google.protobuf.ByteString row_; + /** + * required bytes row = 1; + */ public boolean hasRow() { 
return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes row = 1; + */ public com.google.protobuf.ByteString getRow() { return row_; } - + private void initFields() { row_ = com.google.protobuf.ByteString.EMPTY; } @@ -2650,7 +3278,7 @@ public final class IncrementCounterProcessorTestProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasRow()) { memoizedIsInitialized = 0; return false; @@ -2658,7 +3286,7 @@ public final class IncrementCounterProcessorTestProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -2667,12 +3295,12 @@ public final class IncrementCounterProcessorTestProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -2682,14 +3310,14 @@ public final class IncrementCounterProcessorTestProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -2699,7 +3327,7 @@ public final class IncrementCounterProcessorTestProtos { return super.equals(obj); } org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest) obj; - + boolean result = true; result = result && (hasRow() == other.hasRow()); if (hasRow()) { @@ -2710,9 +3338,13 @@ public final class IncrementCounterProcessorTestProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasRow()) { @@ -2720,89 +3352,79 @@ public final class IncrementCounterProcessorTestProtos { hash = (53 * hash) + getRow().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return 
newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code TimeoutProcessorRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequestOrBuilder { @@ -2810,18 +3432,21 @@ public final class IncrementCounterProcessorTestProtos { getDescriptor() { return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_TimeoutProcessorRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_TimeoutProcessorRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_TimeoutProcessorRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -2832,27 +3457,27 @@ public final class IncrementCounterProcessorTestProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); row_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest.getDescriptor(); + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_TimeoutProcessorRequest_descriptor; } - + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest build() { org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest result = buildPartial(); if (!result.isInitialized()) { @@ -2860,17 +3485,7 @@ public final class IncrementCounterProcessorTestProtos { } return result; } - - private org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest buildPartial() { 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest(this); int from_bitField0_ = bitField0_; @@ -2883,7 +3498,7 @@ public final class IncrementCounterProcessorTestProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest) { return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest)other); @@ -2892,7 +3507,7 @@ public final class IncrementCounterProcessorTestProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest other) { if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest.getDefaultInstance()) return this; if (other.hasRow()) { @@ -2901,7 +3516,7 @@ public final class IncrementCounterProcessorTestProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasRow()) { @@ -2909,49 +3524,43 @@ public final class IncrementCounterProcessorTestProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - row_ = input.readBytes(); - break; - } + org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required bytes row = 1; private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes row = 1; + */ public boolean hasRow() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bytes row = 1; + */ public com.google.protobuf.ByteString getRow() { return row_; } + /** + * required bytes row = 1; + */ public Builder setRow(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); @@ -2961,90 +3570,154 @@ public final class IncrementCounterProcessorTestProtos { onChanged(); return this; } + /** + * required bytes row = 1; + */ public Builder clearRow() { bitField0_ = (bitField0_ & ~0x00000001); row_ = getDefaultInstance().getRow(); onChanged(); return this; } - + // 
@@protoc_insertion_point(builder_scope:TimeoutProcessorRequest) } - + static { defaultInstance = new TimeoutProcessorRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:TimeoutProcessorRequest) } - + public interface TimeoutProcessorResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code TimeoutProcessorResponse} + */ public static final class TimeoutProcessorResponse extends com.google.protobuf.GeneratedMessage implements TimeoutProcessorResponseOrBuilder { // Use TimeoutProcessorResponse.newBuilder() to construct. - private TimeoutProcessorResponse(Builder builder) { + private TimeoutProcessorResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private TimeoutProcessorResponse(boolean noInit) {} - + private TimeoutProcessorResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final TimeoutProcessorResponse defaultInstance; public static TimeoutProcessorResponse getDefaultInstance() { return defaultInstance; } - + public TimeoutProcessorResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private TimeoutProcessorResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_TimeoutProcessorResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_TimeoutProcessorResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_TimeoutProcessorResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public TimeoutProcessorResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new TimeoutProcessorResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -3054,101 +3727,95 @@ public final class IncrementCounterProcessorTestProtos { return super.equals(obj); } org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse) obj; - + boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code TimeoutProcessorResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponseOrBuilder { @@ -3156,18 +3823,21 @@ public final class IncrementCounterProcessorTestProtos { getDescriptor() { return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_TimeoutProcessorResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_TimeoutProcessorResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_TimeoutProcessorResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -3178,25 +3848,25 @@ public final class IncrementCounterProcessorTestProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse.getDescriptor(); + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_TimeoutProcessorResponse_descriptor; } - + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse build() { org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse result = buildPartial(); if (!result.isInitialized()) { @@ -3204,23 +3874,13 @@ public final class IncrementCounterProcessorTestProtos { } return result; } - - private org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse buildPartial() { org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse(this); onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse) { return 
mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse)other); @@ -3229,56 +3889,46 @@ public final class IncrementCounterProcessorTestProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse other) { if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } + org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - - + // @@protoc_insertion_point(builder_scope:TimeoutProcessorResponse) } - + static { defaultInstance = new TimeoutProcessorResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:TimeoutProcessorResponse) } - + private static com.google.protobuf.Descriptors.Descriptor internal_static_IncCounterProcessorRequest_descriptor; private static @@ -3319,7 +3969,7 @@ public final class IncrementCounterProcessorTestProtos { private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_TimeoutProcessorResponse_fieldAccessorTable; - + public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; @@ -3353,65 +4003,49 @@ public final class IncrementCounterProcessorTestProtos { internal_static_IncCounterProcessorRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_IncCounterProcessorRequest_descriptor, - new java.lang.String[] { "Row", "Counter", }, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest.class, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest.Builder.class); + new java.lang.String[] { "Row", "Counter", }); internal_static_IncCounterProcessorResponse_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_IncCounterProcessorResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_IncCounterProcessorResponse_descriptor, - new java.lang.String[] { "Response", }, - 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse.class, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse.Builder.class); + new java.lang.String[] { "Response", }); internal_static_FriendsOfFriendsProcessorRequest_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_FriendsOfFriendsProcessorRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_FriendsOfFriendsProcessorRequest_descriptor, - new java.lang.String[] { "Person", "Row", "Result", }, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest.class, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest.Builder.class); + new java.lang.String[] { "Person", "Row", "Result", }); internal_static_FriendsOfFriendsProcessorResponse_descriptor = getDescriptor().getMessageTypes().get(3); internal_static_FriendsOfFriendsProcessorResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_FriendsOfFriendsProcessorResponse_descriptor, - new java.lang.String[] { "Result", }, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse.class, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse.Builder.class); + new java.lang.String[] { "Result", }); internal_static_RowSwapProcessorRequest_descriptor = getDescriptor().getMessageTypes().get(4); internal_static_RowSwapProcessorRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_RowSwapProcessorRequest_descriptor, - new java.lang.String[] { "Row1", "Row2", }, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest.class, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest.Builder.class); + new java.lang.String[] { "Row1", "Row2", }); internal_static_RowSwapProcessorResponse_descriptor = getDescriptor().getMessageTypes().get(5); internal_static_RowSwapProcessorResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_RowSwapProcessorResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse.class, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse.Builder.class); + new java.lang.String[] { }); internal_static_TimeoutProcessorRequest_descriptor = getDescriptor().getMessageTypes().get(6); internal_static_TimeoutProcessorRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_TimeoutProcessorRequest_descriptor, - new java.lang.String[] { "Row", }, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest.class, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest.Builder.class); + new java.lang.String[] { "Row", }); internal_static_TimeoutProcessorResponse_descriptor = 
getDescriptor().getMessageTypes().get(7); internal_static_TimeoutProcessorResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_TimeoutProcessorResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse.class, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse.Builder.class); + new java.lang.String[] { }); return null; } }; @@ -3420,6 +4054,6 @@ public final class IncrementCounterProcessorTestProtos { new com.google.protobuf.Descriptors.FileDescriptor[] { }, assigner); } - + // @@protoc_insertion_point(outer_class_scope) } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/PingProtos.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/PingProtos.java index ca86c51..57903cd 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/PingProtos.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/PingProtos.java @@ -11,69 +11,130 @@ public final class PingProtos { public interface PingRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code PingRequest} + */ public static final class PingRequest extends com.google.protobuf.GeneratedMessage implements PingRequestOrBuilder { // Use PingRequest.newBuilder() to construct. - private PingRequest(Builder builder) { + private PingRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private PingRequest(boolean noInit) {} - + private PingRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final PingRequest defaultInstance; public static PingRequest getDefaultInstance() { return defaultInstance; } - + public PingRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private PingRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_PingRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - 
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_PingRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_PingRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest.Builder.class); } - + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public PingRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new PingRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -83,101 +144,95 @@ public final class PingProtos { return super.equals(obj); } org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest) obj; - + boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code PingRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequestOrBuilder { @@ -185,18 +240,21 @@ public final class PingProtos { getDescriptor() { return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_PingRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_PingRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_PingRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -207,25 +265,25 @@ public final class PingProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest.getDescriptor(); + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_PingRequest_descriptor; } - + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest build() { org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest result = buildPartial(); if (!result.isInitialized()) { @@ -233,23 +291,13 @@ public final class PingProtos { } return result; } - - private org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest buildPartial() { org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest(this); onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest) { return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest)other); @@ -258,124 +306,203 @@ public final class PingProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest other) { if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - 
com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } + org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - - + // @@protoc_insertion_point(builder_scope:PingRequest) } - + static { defaultInstance = new PingRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:PingRequest) } - + public interface PingResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required string pong = 1; + /** + * required string pong = 1; + */ boolean hasPong(); - String getPong(); + /** + * required string pong = 1; + */ + java.lang.String getPong(); + /** + * required string pong = 1; + */ + com.google.protobuf.ByteString + getPongBytes(); } + /** + * Protobuf type {@code PingResponse} + */ public static final class PingResponse extends com.google.protobuf.GeneratedMessage implements PingResponseOrBuilder { // Use PingResponse.newBuilder() to construct. - private PingResponse(Builder builder) { + private PingResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private PingResponse(boolean noInit) {} - + private PingResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final PingResponse defaultInstance; public static PingResponse getDefaultInstance() { return defaultInstance; } - + public PingResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private PingResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + pong_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor 
getDescriptor() { return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_PingResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_PingResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_PingResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public PingResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new PingResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required string pong = 1; public static final int PONG_FIELD_NUMBER = 1; private java.lang.Object pong_; + /** + * required string pong = 1; + */ public boolean hasPong() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getPong() { + /** + * required string pong = 1; + */ + public java.lang.String getPong() { java.lang.Object ref = pong_; - if (ref instanceof String) { - return (String) ref; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { pong_ = s; } return s; } } - private com.google.protobuf.ByteString getPongBytes() { + /** + * required string pong = 1; + */ + public com.google.protobuf.ByteString + getPongBytes() { java.lang.Object ref = pong_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); pong_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + private void initFields() { pong_ = ""; } @@ -383,7 +510,7 @@ public final class PingProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasPong()) { memoizedIsInitialized = 0; return false; @@ -391,7 +518,7 @@ public final class PingProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -400,12 +527,12 @@ public final class PingProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -415,14 +542,14 @@ public final class PingProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws 
java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -432,7 +559,7 @@ public final class PingProtos { return super.equals(obj); } org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse) obj; - + boolean result = true; result = result && (hasPong() == other.hasPong()); if (hasPong()) { @@ -443,9 +570,13 @@ public final class PingProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasPong()) { @@ -453,89 +584,79 @@ public final class PingProtos { hash = (53 * hash) + getPong().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { 
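      // The delimited variants are the only rewrites in this block with a
      // return-value subtlety: the removed code answered null when
      // mergeDelimitedFrom() hit a clean end-of-stream. The PARSER form
      // appears to preserve that contract, since protobuf 2.5.0's
      // AbstractParser.parseDelimitedFrom() likewise returns null when the
      // leading length byte reads as EOF, so a drain loop such as
      //   PingResponse msg;
      //   while ((msg = PingResponse.parseDelimitedFrom(in)) != null) {
      //     handle(msg);  // handle() is a hypothetical consumer, not in this patch
      //   }
      // terminates identically against either generation of this class.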
- Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code PingResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponseOrBuilder { @@ -543,18 +664,21 @@ public final class PingProtos { getDescriptor() { return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_PingResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_PingResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_PingResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -565,27 +689,27 @@ public final class PingProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); pong_ = ""; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse.getDescriptor(); + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_PingResponse_descriptor; } - + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse getDefaultInstanceForType() { return 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse build() { org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse result = buildPartial(); if (!result.isInitialized()) { @@ -593,17 +717,7 @@ public final class PingProtos { } return result; } - - private org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse buildPartial() { org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse(this); int from_bitField0_ = bitField0_; @@ -616,7 +730,7 @@ public final class PingProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse) { return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse)other); @@ -625,16 +739,18 @@ public final class PingProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse other) { if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse.getDefaultInstance()) return this; if (other.hasPong()) { - setPong(other.getPong()); + bitField0_ |= 0x00000001; + pong_ = other.pong_; + onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasPong()) { @@ -642,57 +758,69 @@ public final class PingProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - pong_ = input.readBytes(); - break; - } + org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required string pong = 1; private java.lang.Object pong_ = ""; + /** + * required string pong = 1; + */ public boolean hasPong() { return ((bitField0_ & 0x00000001) == 0x00000001); } - 
public String getPong() { + /** + * required string pong = 1; + */ + public java.lang.String getPong() { java.lang.Object ref = pong_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); pong_ = s; return s; } else { - return (String) ref; + return (java.lang.String) ref; } } - public Builder setPong(String value) { + /** + * required string pong = 1; + */ + public com.google.protobuf.ByteString + getPongBytes() { + java.lang.Object ref = pong_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + pong_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * required string pong = 1; + */ + public Builder setPong( + java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ -701,95 +829,167 @@ public final class PingProtos { onChanged(); return this; } + /** + * required string pong = 1; + */ public Builder clearPong() { bitField0_ = (bitField0_ & ~0x00000001); pong_ = getDefaultInstance().getPong(); onChanged(); return this; } - void setPong(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; + /** + * required string pong = 1; + */ + public Builder setPongBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; pong_ = value; onChanged(); + return this; } - + // @@protoc_insertion_point(builder_scope:PingResponse) } - + static { defaultInstance = new PingResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:PingResponse) } - + public interface CountRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code CountRequest} + */ public static final class CountRequest extends com.google.protobuf.GeneratedMessage implements CountRequestOrBuilder { // Use CountRequest.newBuilder() to construct. 
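    // CountRequest follows the same regeneration pattern seen on PingRequest
    // and PingResponse above: wire parsing moves out of Builder.mergeFrom()
    // into a parsing constructor driven by the new PARSER constant, and every
    // constructor must now assign the final unknownFields field that the
    // regenerated getUnknownFields() override returns (hence the explicit
    // UnknownFieldSet.getDefaultInstance() in the noInit constructor below).
    // Rough call chain, as a sketch:
    //   CountRequest.PARSER.parseFrom(bytes)
    //     -> PARSER.parsePartialFrom(input, registry)   // the one method the
    //     -> new CountRequest(input, registry)          // anonymous parser defines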
- private CountRequest(Builder builder) { + private CountRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private CountRequest(boolean noInit) {} - + private CountRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final CountRequest defaultInstance; public static CountRequest getDefaultInstance() { return defaultInstance; } - + public CountRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private CountRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_CountRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_CountRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_CountRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest.Builder.class); } - + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public CountRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CountRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; size += 
getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -799,101 +999,95 @@ public final class PingProtos { return super.equals(obj); } org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest) obj; - + boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - 
Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code CountRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequestOrBuilder { @@ -901,18 +1095,21 @@ public final class PingProtos { getDescriptor() { return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_CountRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_CountRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_CountRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -923,25 +1120,25 @@ public final class PingProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest.getDescriptor(); + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_CountRequest_descriptor; } - + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest.getDefaultInstance(); } - + public
org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest build() { org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest result = buildPartial(); if (!result.isInitialized()) { @@ -949,23 +1146,13 @@ public final class PingProtos { } return result; } - - private org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest buildPartial() { org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest(this); onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest) { return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest)other); @@ -974,102 +1161,171 @@ public final class PingProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest other) { if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } + org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - - + // @@protoc_insertion_point(builder_scope:CountRequest) } - + static { defaultInstance = new CountRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:CountRequest) } - + public interface CountResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required int32 count = 1; + /** + * required int32 count = 1; + */ boolean hasCount(); + /** + * required int32 count = 1; + */ int getCount(); } + /** + * Protobuf type {@code CountResponse} + */ public static final class CountResponse extends com.google.protobuf.GeneratedMessage implements CountResponseOrBuilder { // Use CountResponse.newBuilder() to construct. 
- private CountResponse(Builder builder) { + private CountResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private CountResponse(boolean noInit) {} - + private CountResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final CountResponse defaultInstance; public static CountResponse getDefaultInstance() { return defaultInstance; } - + public CountResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private CountResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + count_ = input.readInt32(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_CountResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_CountResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_CountResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse.Builder.class); } - + + public static com.google.protobuf.Parser<CountResponse> PARSER = + new com.google.protobuf.AbstractParser<CountResponse>() { + public CountResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CountResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser<CountResponse> getParserForType() { + return PARSER; + } + private int bitField0_; // required int32 count = 1; public static final int COUNT_FIELD_NUMBER = 1; private int count_; + /** + * required int32 count = 1; + */ public boolean hasCount() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required int32 count = 1; + */ public int getCount() { return count_; } - + private void initFields() { count_ = 0; } @@ -1077,7 +1333,7 @@ public final class PingProtos { public final boolean isInitialized() { byte isInitialized
= memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasCount()) { memoizedIsInitialized = 0; return false; @@ -1085,7 +1341,7 @@ public final class PingProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -1094,12 +1350,12 @@ public final class PingProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -1109,14 +1365,14 @@ public final class PingProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -1126,7 +1382,7 @@ public final class PingProtos { return super.equals(obj); } org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse) obj; - + boolean result = true; result = result && (hasCount() == other.hasCount()); if (hasCount()) { @@ -1137,9 +1393,13 @@ public final class PingProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasCount()) { @@ -1147,89 +1407,79 @@ public final class PingProtos { hash = (53 * hash) + getCount(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } 
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code CountResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponseOrBuilder { @@ -1237,18 +1487,21 @@ public final class PingProtos { getDescriptor() { return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_CountResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_CountResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_CountResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) {
+ + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -1259,27 +1512,27 @@ public final class PingProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); count_ = 0; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse.getDescriptor(); + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_CountResponse_descriptor; } - + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse build() { org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse result = buildPartial(); if (!result.isInitialized()) { @@ -1287,17 +1540,7 @@ public final class PingProtos { } return result; } - - private org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse buildPartial() { org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse(this); int from_bitField0_ = bitField0_; @@ -1310,7 +1553,7 @@ public final class PingProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse) { return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse)other); @@ -1319,7 +1562,7 @@ public final class PingProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse other) { if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse.getDefaultInstance()) return this; if (other.hasCount()) { @@ -1328,7 +1571,7 @@ public final class PingProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasCount()) { @@ -1336,119 +1579,195 @@ public final class PingProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - 
this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - count_ = input.readInt32(); - break; - } + org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required int32 count = 1; private int count_ ; + /** + * required int32 count = 1; + */ public boolean hasCount() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required int32 count = 1; + */ public int getCount() { return count_; } + /** + * required int32 count = 1; + */ public Builder setCount(int value) { bitField0_ |= 0x00000001; count_ = value; onChanged(); return this; } + /** + * required int32 count = 1; + */ public Builder clearCount() { bitField0_ = (bitField0_ & ~0x00000001); count_ = 0; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:CountResponse) } - + static { defaultInstance = new CountResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:CountResponse) } - + public interface IncrementCountRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required int32 diff = 1; + /** + * required int32 diff = 1; + */ boolean hasDiff(); + /** + * required int32 diff = 1; + */ int getDiff(); } + /** + * Protobuf type {@code IncrementCountRequest} + */ public static final class IncrementCountRequest extends com.google.protobuf.GeneratedMessage implements IncrementCountRequestOrBuilder { // Use IncrementCountRequest.newBuilder() to construct. 
- private IncrementCountRequest(Builder builder) { + private IncrementCountRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private IncrementCountRequest(boolean noInit) {} - + private IncrementCountRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final IncrementCountRequest defaultInstance; public static IncrementCountRequest getDefaultInstance() { return defaultInstance; } - + public IncrementCountRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private IncrementCountRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + diff_ = input.readInt32(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_IncrementCountRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_IncrementCountRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_IncrementCountRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest.Builder.class); } - + + public static com.google.protobuf.Parser<IncrementCountRequest> PARSER = + new com.google.protobuf.AbstractParser<IncrementCountRequest>() { + public IncrementCountRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new IncrementCountRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser<IncrementCountRequest> getParserForType() { + return PARSER; + } + private int bitField0_; // required int32 diff = 1; public static final int DIFF_FIELD_NUMBER = 1; private int diff_; + /** + * required int32 diff = 1; + */ public boolean hasDiff() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required int32 diff = 1; + */ public int getDiff() { return diff_; } - + private void initFields() { diff_ = 0; } @@
-1456,7 +1775,7 @@ public final class PingProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasDiff()) { memoizedIsInitialized = 0; return false; @@ -1464,7 +1783,7 @@ public final class PingProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -1473,12 +1792,12 @@ public final class PingProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -1488,14 +1807,14 @@ public final class PingProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -1505,7 +1824,7 @@ public final class PingProtos { return super.equals(obj); } org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest) obj; - + boolean result = true; result = result && (hasDiff() == other.hasDiff()); if (hasDiff()) { @@ -1516,9 +1835,13 @@ public final class PingProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasDiff()) { @@ -1526,89 +1849,79 @@ public final class PingProtos { hash = (53 * hash) + getDiff(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code IncrementCountRequest} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequestOrBuilder { @@ -1616,18 +1929,21 @@ public final class PingProtos { getDescriptor() { return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_IncrementCountRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_IncrementCountRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_IncrementCountRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( +
org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest.Builder.class); } - + // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -1638,27 +1954,27 @@ public final class PingProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); diff_ = 0; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest.getDescriptor(); + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_IncrementCountRequest_descriptor; } - + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest.getDefaultInstance(); } - + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest build() { org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest result = buildPartial(); if (!result.isInitialized()) { @@ -1666,17 +1982,7 @@ public final class PingProtos { } return result; } - - private org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest buildPartial() { org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest(this); int from_bitField0_ = bitField0_; @@ -1689,7 +1995,7 @@ public final class PingProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest) { return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest)other); @@ -1698,7 +2004,7 @@ public final class PingProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest other) { if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest.getDefaultInstance()) return this; if (other.hasDiff()) { @@ -1707,7 +2013,7 @@ public final class PingProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasDiff()) { @@ -1715,119 +2021,195 @@ public final class PingProtos { } return true; } - + public Builder mergeFrom( 
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - diff_ = input.readInt32(); - break; - } + org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required int32 diff = 1; private int diff_ ; + /** + * required int32 diff = 1; + */ public boolean hasDiff() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required int32 diff = 1; + */ public int getDiff() { return diff_; } + /** + * required int32 diff = 1; + */ public Builder setDiff(int value) { bitField0_ |= 0x00000001; diff_ = value; onChanged(); return this; } + /** + * required int32 diff = 1; + */ public Builder clearDiff() { bitField0_ = (bitField0_ & ~0x00000001); diff_ = 0; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:IncrementCountRequest) } - + static { defaultInstance = new IncrementCountRequest(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:IncrementCountRequest) } - + public interface IncrementCountResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required int32 count = 1; + /** + * required int32 count = 1; + */ boolean hasCount(); + /** + * required int32 count = 1; + */ int getCount(); } + /** + * Protobuf type {@code IncrementCountResponse} + */ public static final class IncrementCountResponse extends com.google.protobuf.GeneratedMessage implements IncrementCountResponseOrBuilder { // Use IncrementCountResponse.newBuilder() to construct. 
- private IncrementCountResponse(Builder builder) { + private IncrementCountResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private IncrementCountResponse(boolean noInit) {} - + private IncrementCountResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final IncrementCountResponse defaultInstance; public static IncrementCountResponse getDefaultInstance() { return defaultInstance; } - + public IncrementCountResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private IncrementCountResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + count_ = input.readInt32(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_IncrementCountResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_IncrementCountResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_IncrementCountResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse.Builder.class); + } + + public static com.google.protobuf.Parser<IncrementCountResponse> PARSER = + new com.google.protobuf.AbstractParser<IncrementCountResponse>() { + public IncrementCountResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new IncrementCountResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser<IncrementCountResponse> getParserForType() { + return PARSER; } - + private int bitField0_; // required int32 count = 1; public static final int COUNT_FIELD_NUMBER = 1; private int count_; + /** + * required int32 count = 1; + */ public boolean hasCount() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required int32 count = 1; + */ public int getCount() { return count_; } - + private void
initFields() { count_ = 0; } @@ -1835,7 +2217,7 @@ public final class PingProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasCount()) { memoizedIsInitialized = 0; return false; @@ -1843,7 +2225,7 @@ public final class PingProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -1852,12 +2234,12 @@ public final class PingProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -1867,14 +2249,14 @@ public final class PingProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -1884,7 +2266,7 @@ public final class PingProtos { return super.equals(obj); } org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse) obj; - + boolean result = true; result = result && (hasCount() == other.hasCount()); if (hasCount()) { @@ -1895,9 +2277,13 @@ public final class PingProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasCount()) { @@ -1905,89 +2291,79 @@ public final class PingProtos { hash = (53 * hash) + getCount(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code IncrementCountResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponseOrBuilder { @@ -1995,18 +2371,21 @@ public final class PingProtos { getDescriptor() { return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_IncrementCountResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_IncrementCountResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_IncrementCountResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( +
org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse.Builder.class); } - + // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -2017,27 +2396,27 @@ public final class PingProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); count_ = 0; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse.getDescriptor(); + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_IncrementCountResponse_descriptor; } - + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse build() { org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse result = buildPartial(); if (!result.isInitialized()) { @@ -2045,17 +2424,7 @@ public final class PingProtos { } return result; } - - private org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse buildPartial() { org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse(this); int from_bitField0_ = bitField0_; @@ -2068,7 +2437,7 @@ public final class PingProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse) { return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse)other); @@ -2077,7 +2446,7 @@ public final class PingProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse other) { if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse.getDefaultInstance()) return this; if (other.hasCount()) { @@ -2086,7 +2455,7 @@ public final class PingProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasCount()) { @@ -2094,141 +2463,227 @@ public final class PingProtos { } return true; } - + 
public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - count_ = input.readInt32(); - break; - } + org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required int32 count = 1; private int count_ ; + /** + * required int32 count = 1; + */ public boolean hasCount() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required int32 count = 1; + */ public int getCount() { return count_; } + /** + * required int32 count = 1; + */ public Builder setCount(int value) { bitField0_ |= 0x00000001; count_ = value; onChanged(); return this; } + /** + * required int32 count = 1; + */ public Builder clearCount() { bitField0_ = (bitField0_ & ~0x00000001); count_ = 0; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:IncrementCountResponse) } - + static { defaultInstance = new IncrementCountResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:IncrementCountResponse) } - + public interface HelloRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // optional string name = 1; + /** + * optional string name = 1; + */ boolean hasName(); - String getName(); + /** + * optional string name = 1; + */ + java.lang.String getName(); + /** + * optional string name = 1; + */ + com.google.protobuf.ByteString + getNameBytes(); } + /** + * Protobuf type {@code HelloRequest} + */ public static final class HelloRequest extends com.google.protobuf.GeneratedMessage implements HelloRequestOrBuilder { // Use HelloRequest.newBuilder() to construct. 
- private HelloRequest(Builder builder) { + private HelloRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private HelloRequest(boolean noInit) {} - + private HelloRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final HelloRequest defaultInstance; public static HelloRequest getDefaultInstance() { return defaultInstance; } - + public HelloRequest getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private HelloRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + name_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_HelloRequest_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_HelloRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_HelloRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest.Builder.class); + } + + public static com.google.protobuf.Parser<HelloRequest> PARSER = + new com.google.protobuf.AbstractParser<HelloRequest>() { + public HelloRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new HelloRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser<HelloRequest> getParserForType() { + return PARSER; } - + private int bitField0_; // optional string name = 1; public static final int NAME_FIELD_NUMBER = 1; private java.lang.Object name_; + /** + * optional string name = 1; + */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getName() { + /** + * optional string name = 1; + */ + public java.lang.String getName() { java.lang.Object ref = name_; - if (ref instanceof String) { - return (String) ref; + if (ref instanceof java.lang.String) { + return
(java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { name_ = s; } return s; } } - private com.google.protobuf.ByteString getNameBytes() { + /** + * optional string name = 1; + */ + public com.google.protobuf.ByteString + getNameBytes() { java.lang.Object ref = name_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + private void initFields() { name_ = ""; } @@ -2236,11 +2691,11 @@ public final class PingProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -2249,12 +2704,12 @@ public final class PingProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -2264,14 +2719,14 @@ public final class PingProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -2281,7 +2736,7 @@ public final class PingProtos { return super.equals(obj); } org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest) obj; - + boolean result = true; result = result && (hasName() == other.hasName()); if (hasName()) { @@ -2292,9 +2747,13 @@ public final class PingProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasName()) { @@ -2302,89 +2761,79 @@ public final class PingProtos { hash = (53 * hash) + getName().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static 
     public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest parseFrom(byte[] data)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
     }
     public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest parseFrom(
         byte[] data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-          .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest parseFrom(java.io.InputStream input)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
     }
     public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest parseFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-          .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest parseDelimitedFrom(java.io.InputStream input)
         throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input);
     }
     public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest parseDelimitedFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest parseFrom(
         com.google.protobuf.CodedInputStream input)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-          .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
    }
-
+
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
-
+
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
+    /**
+     * Protobuf type {@code HelloRequest}
+     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequestOrBuilder {
@@ -2392,18 +2841,21 @@ public final class PingProtos {
          getDescriptor() {
        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_HelloRequest_descriptor;
      }
-
+
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_HelloRequest_fieldAccessorTable;
+        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_HelloRequest_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest.Builder.class);
      }
-
+
      // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
-
-      private Builder(BuilderParent parent) {
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
@@ -2414,27 +2866,27 @@ public final class PingProtos {
      private static Builder create() {
        return new Builder();
      }
-
+
      public Builder clear() {
        super.clear();
        name_ = "";
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
-
+
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
-
+
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
-        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest.getDescriptor();
+        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_HelloRequest_descriptor;
      }
-
+
      public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest.getDefaultInstance();
      }
-
+
      public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest build() {
        org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest result = buildPartial();
        if (!result.isInitialized()) {
@@ -2442,17 +2894,7 @@
        }
        return result;
      }
-
-      private org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest buildParsed()
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(
-            result).asInvalidProtocolBufferException();
-        }
-        return result;
-      }
-
+
      public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest buildPartial() {
        org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest(this);
        int from_bitField0_ = bitField0_;
@@ -2465,7 +2907,7 @@
        onBuilt();
        return result;
      }
-
+
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest) {
          return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest)other);
@@ -2474,70 +2916,84 @@
          return this;
        }
      }
-
+
      public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest other) {
        if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest.getDefaultInstance()) return this;
        if (other.hasName()) {
-          setName(other.getName());
+          bitField0_ |= 0x00000001;
+          name_ = other.name_;
+          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
-
+
      public final boolean isInitialized() {
        return true;
      }
-
+
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
-        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder(
-            this.getUnknownFields());
-        while (true) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              this.setUnknownFields(unknownFields.build());
-              onChanged();
-              return this;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                this.setUnknownFields(unknownFields.build());
-                onChanged();
-                return this;
-              }
-              break;
-            }
-            case 10: {
-              bitField0_ |= 0x00000001;
-              name_ = input.readBytes();
-              break;
-            }
+        org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
          }
        }
+        return this;
      }
-
      private int bitField0_;
-
+
      // optional string name = 1;
      private java.lang.Object name_ = "";
+      /**
+       * <code>optional string name = 1;</code>
+       */
      public boolean hasName() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
-      public String getName() {
+      /**
+       * <code>optional string name = 1;</code>
+       */
+      public java.lang.String getName() {
        java.lang.Object ref = name_;
-        if (!(ref instanceof String)) {
-          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
+        if (!(ref instanceof java.lang.String)) {
+          java.lang.String s = ((com.google.protobuf.ByteString) ref)
+              .toStringUtf8();
          name_ = s;
          return s;
        } else {
-          return (String) ref;
+          return (java.lang.String) ref;
+        }
+      }
+      /**
+       * <code>optional string name = 1;</code>
+       */
+      public com.google.protobuf.ByteString
+          getNameBytes() {
+        java.lang.Object ref = name_;
+        if (ref instanceof String) {
+          com.google.protobuf.ByteString b =
+              com.google.protobuf.ByteString.copyFromUtf8(
+                  (java.lang.String) ref);
+          name_ = b;
+          return b;
+        } else {
+          return (com.google.protobuf.ByteString) ref;
        }
      }
-      public Builder setName(String value) {
+      /**
+       * <code>optional string name = 1;</code>
+       */
+      public Builder setName(
+          java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
@@ -2546,97 +3002,197 @@ public final class PingProtos {
        onChanged();
        return this;
      }
+      /**
+       * <code>optional string name = 1;</code>
+       */
      public Builder clearName() {
        bitField0_ = (bitField0_ & ~0x00000001);
        name_ = getDefaultInstance().getName();
        onChanged();
        return this;
      }
-      void setName(com.google.protobuf.ByteString value) {
-        bitField0_ |= 0x00000001;
+      /**
+       * <code>optional string name = 1;</code>
+       */
+      public Builder setNameBytes(
+          com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000001;
        name_ = value;
        onChanged();
+        return this;
      }
-
+
      // @@protoc_insertion_point(builder_scope:HelloRequest)
    }
-
+
    static {
      defaultInstance = new HelloRequest(true);
      defaultInstance.initFields();
    }
-
+
    // @@protoc_insertion_point(class_scope:HelloRequest)
  }
-
+
  public interface HelloResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
-
+
    // optional string response = 1;
+    /**
+     * <code>optional string response = 1;</code>
+     */
    boolean hasResponse();
-    String getResponse();
+    /**
+     * <code>optional string response = 1;</code>
+     */
+    java.lang.String getResponse();
+    /**
+     * <code>optional string response = 1;</code>
+     */
+    com.google.protobuf.ByteString
+        getResponseBytes();
  }
+  /**
+   * Protobuf type {@code HelloResponse}
+   */
  public static final class HelloResponse extends
      com.google.protobuf.GeneratedMessage
      implements HelloResponseOrBuilder {
    // Use HelloResponse.newBuilder() to construct.
-    private HelloResponse(Builder builder) {
+    private HelloResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
+      this.unknownFields = builder.getUnknownFields();
    }
-    private HelloResponse(boolean noInit) {}
-
+    private HelloResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
    private static final HelloResponse defaultInstance;
    public static HelloResponse getDefaultInstance() {
      return defaultInstance;
    }
-
+
    public HelloResponse getDefaultInstanceForType() {
      return defaultInstance;
    }
-
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private HelloResponse(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 10: {
+              bitField0_ |= 0x00000001;
+              response_ = input.readBytes();
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_HelloResponse_descriptor;
    }
-
+
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_HelloResponse_fieldAccessorTable;
+      return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_HelloResponse_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<HelloResponse> PARSER =
+        new com.google.protobuf.AbstractParser<HelloResponse>() {
+      public HelloResponse parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new HelloResponse(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<HelloResponse> getParserForType() {
+      return PARSER;
    }
-
+
    private int bitField0_;
    // optional string response = 1;
    public static final int RESPONSE_FIELD_NUMBER = 1;
    private java.lang.Object response_;
+    /**
+     * <code>optional string response = 1;</code>
+     */
    public boolean hasResponse() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
-    public String getResponse() {
+    /**
+     * <code>optional string response = 1;</code>
+     */
+    public java.lang.String getResponse() {
      java.lang.Object ref = response_;
-      if (ref instanceof String) {
-        return (String) ref;
+      if (ref instanceof java.lang.String) {
+        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
-        String s = bs.toStringUtf8();
-        if (com.google.protobuf.Internal.isValidUtf8(bs)) {
+        java.lang.String s = bs.toStringUtf8();
+        if (bs.isValidUtf8()) {
          response_ = s;
        }
        return s;
      }
    }
-    private com.google.protobuf.ByteString getResponseBytes() {
+    /**
+     * <code>optional string response = 1;</code>
+     */
+    public com.google.protobuf.ByteString
+        getResponseBytes() {
      java.lang.Object ref = response_;
-      if (ref instanceof String) {
+      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b =
-            com.google.protobuf.ByteString.copyFromUtf8((String) ref);
+            com.google.protobuf.ByteString.copyFromUtf8(
+                (java.lang.String) ref);
        response_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
-
+
    private void initFields() {
      response_ = "";
    }
@@ -2644,11 +3200,11 @@ public final class PingProtos {
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
-
+
      memoizedIsInitialized = 1;
      return true;
    }
-
+
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
@@ -2657,12 +3213,12 @@
      }
      getUnknownFields().writeTo(output);
    }
-
+
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
-
+
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
@@ -2672,14 +3228,14 @@
      memoizedSerializedSize = size;
      return size;
    }
-
+
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
-
+
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
@@ -2689,7 +3245,7 @@
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse) obj;
-
+
      boolean result = true;
      result = result && (hasResponse() == other.hasResponse());
      if (hasResponse()) {
@@ -2700,9 +3256,13 @@
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
-
+
+    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasResponse()) {
@@ -2710,89 +3270,79 @@
        hash = (53 * hash) + getResponse().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
      return hash;
    }
-
+
    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-          .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-          .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-          .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-          .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
    }
-
+
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
-
+
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
+    /**
+     * Protobuf type {@code HelloResponse}
+     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponseOrBuilder {
@@ -2800,18 +3350,21 @@ public final class PingProtos {
          getDescriptor() {
        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_HelloResponse_descriptor;
      }
-
+
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_HelloResponse_fieldAccessorTable;
+        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_HelloResponse_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse.Builder.class);
      }
-
+
      // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
-
-      private Builder(BuilderParent parent) {
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
@@ -2822,27 +3375,27 @@ public final class PingProtos {
      private static Builder create() {
        return new Builder();
      }
-
+
      public Builder clear() {
        super.clear();
        response_ = "";
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
-
+
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
-
+
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
-        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse.getDescriptor();
+        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_HelloResponse_descriptor;
      }
-
+
      public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse.getDefaultInstance();
      }
-
+
      public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse build() {
        org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse result = buildPartial();
        if (!result.isInitialized()) {
@@ -2850,17 +3403,7 @@
        }
        return result;
      }
-
-      private org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse buildParsed()
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(
-            result).asInvalidProtocolBufferException();
-        }
-        return result;
-      }
-
+
      public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse buildPartial() {
        org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse(this);
        int from_bitField0_ = bitField0_;
@@ -2873,7 +3416,7 @@ public final class PingProtos {
        onBuilt();
        return result;
      }
-
+
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse) {
          return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse)other);
@@ -2882,70 +3425,84 @@
          return this;
        }
      }
-
+
      public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse other) {
        if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse.getDefaultInstance()) return this;
        if (other.hasResponse()) {
-          setResponse(other.getResponse());
+          bitField0_ |= 0x00000001;
+          response_ = other.response_;
+          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
-
+
      public final boolean isInitialized() {
        return true;
      }
-
+
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
-        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder(
-            this.getUnknownFields());
-        while (true) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              this.setUnknownFields(unknownFields.build());
-              onChanged();
-              return this;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                this.setUnknownFields(unknownFields.build());
-                onChanged();
-                return this;
-              }
-              break;
-            }
-            case 10: {
-              bitField0_ |= 0x00000001;
-              response_ = input.readBytes();
-              break;
-            }
+        org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
          }
        }
+        return this;
      }
-
      private int bitField0_;
-
+
      // optional string response = 1;
      private java.lang.Object response_ = "";
+      /**
+       * <code>optional string response = 1;</code>
+       */
      public boolean hasResponse() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
-      public String getResponse() {
+      /**
+       * <code>optional string response = 1;</code>
+       */
+      public java.lang.String getResponse() {
        java.lang.Object ref = response_;
-        if (!(ref instanceof String)) {
-          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
+        if (!(ref instanceof java.lang.String)) {
+          java.lang.String s = ((com.google.protobuf.ByteString) ref)
+              .toStringUtf8();
          response_ = s;
          return s;
        } else {
-          return (String) ref;
+          return (java.lang.String) ref;
+        }
+      }
+      /**
+       * <code>optional string response = 1;</code>
+       */
+      public com.google.protobuf.ByteString
+          getResponseBytes() {
+        java.lang.Object ref = response_;
+        if (ref instanceof String) {
+          com.google.protobuf.ByteString b =
+              com.google.protobuf.ByteString.copyFromUtf8(
+                  (java.lang.String) ref);
+          response_ = b;
+          return b;
+        } else {
+          return (com.google.protobuf.ByteString) ref;
        }
      }
-      public Builder setResponse(String value) {
+      /**
+       * <code>optional string response = 1;</code>
+       */
+      public Builder setResponse(
+          java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
@@ -2954,95 +3511,167 @@ public final class PingProtos {
        onChanged();
        return this;
      }
+      /**
+       * <code>optional string response = 1;</code>
+       */
      public Builder clearResponse() {
        bitField0_ = (bitField0_ & ~0x00000001);
        response_ = getDefaultInstance().getResponse();
        onChanged();
        return this;
      }
-      void setResponse(com.google.protobuf.ByteString value) {
-        bitField0_ |= 0x00000001;
+      /**
+       * <code>optional string response = 1;</code>
+       */
+      public Builder setResponseBytes(
+          com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000001;
        response_ = value;
        onChanged();
+        return this;
      }
-
+
      // @@protoc_insertion_point(builder_scope:HelloResponse)
    }
-
+
    static {
      defaultInstance = new HelloResponse(true);
      defaultInstance.initFields();
    }
-
+
    // @@protoc_insertion_point(class_scope:HelloResponse)
  }
-
+
  public interface NoopRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
+  /**
+   * Protobuf type {@code NoopRequest}
+   */
  public static final class NoopRequest extends
      com.google.protobuf.GeneratedMessage
      implements NoopRequestOrBuilder {
    // Use NoopRequest.newBuilder() to construct.
-    private NoopRequest(Builder builder) {
+    private NoopRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
+      this.unknownFields = builder.getUnknownFields();
    }
-    private NoopRequest(boolean noInit) {}
-
+    private NoopRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
    private static final NoopRequest defaultInstance;
    public static NoopRequest getDefaultInstance() {
      return defaultInstance;
    }
-
+
    public NoopRequest getDefaultInstanceForType() {
      return defaultInstance;
    }
-
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private NoopRequest(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_NoopRequest_descriptor;
    }
-
+
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_NoopRequest_fieldAccessorTable;
+      return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_NoopRequest_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<NoopRequest> PARSER =
+        new com.google.protobuf.AbstractParser<NoopRequest>() {
+      public NoopRequest parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new NoopRequest(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<NoopRequest> getParserForType() {
+      return PARSER;
    }
-
+
    private void initFields() {
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
-
+
      memoizedIsInitialized = 1;
      return true;
    }
-
+
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }
-
+
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
-
+
      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
-
+
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
-
+
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
@@ -3052,101 +3681,95 @@ public final class PingProtos {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest) obj;
-
+
      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
-
+
+    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
      return hash;
    }
-
+
    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-          .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-          .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-          .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-          .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
    }
-
+
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
-
+
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
+    /**
+     * Protobuf type {@code NoopRequest}
+     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequestOrBuilder {
@@ -3154,18 +3777,21 @@ public final class PingProtos {
          getDescriptor() {
        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_NoopRequest_descriptor;
      }
-
+
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_NoopRequest_fieldAccessorTable;
+        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_NoopRequest_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest.Builder.class);
      }
-
+
      // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
-
-      private Builder(BuilderParent parent) {
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
@@ -3176,25 +3802,25 @@ public final class PingProtos {
      private static Builder create() {
        return new Builder();
      }
-
+
      public Builder clear() {
        super.clear();
        return this;
      }
-
+
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
-
+
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
-        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest.getDescriptor();
+        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_NoopRequest_descriptor;
      }
-
+
      public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest.getDefaultInstance();
      }
-
+
      public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest build() {
        org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest result = buildPartial();
        if (!result.isInitialized()) {
@@ -3202,23 +3828,13 @@
        }
        return result;
      }
-
-      private org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest buildParsed()
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(
-            result).asInvalidProtocolBufferException();
-        }
-        return result;
-      }
-
+
      public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest buildPartial() {
        org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest(this);
        onBuilt();
        return result;
      }
-
+
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest) {
          return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest)other);
@@ -3227,122 +3843,173 @@
          return this;
        }
      }
-
+
      public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest other) {
        if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
-
+
      public final boolean isInitialized() {
        return true;
      }
-
+
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
-        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder(
-            this.getUnknownFields());
-        while (true) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              this.setUnknownFields(unknownFields.build());
-              onChanged();
-              return this;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                this.setUnknownFields(unknownFields.build());
-                onChanged();
-                return this;
-              }
-              break;
-            }
+        org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
          }
        }
+        return this;
      }
-
-
+
      // @@protoc_insertion_point(builder_scope:NoopRequest)
    }
-
+
    static {
      defaultInstance = new NoopRequest(true);
      defaultInstance.initFields();
    }
-
+
    // @@protoc_insertion_point(class_scope:NoopRequest)
  }
-
+
  public interface NoopResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
+  /**
+   * Protobuf type {@code NoopResponse}
+   */
  public static final class NoopResponse extends
      com.google.protobuf.GeneratedMessage
      implements NoopResponseOrBuilder {
    // Use NoopResponse.newBuilder() to construct.
-    private NoopResponse(Builder builder) {
+    private NoopResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
+      this.unknownFields = builder.getUnknownFields();
    }
-    private NoopResponse(boolean noInit) {}
-
+    private NoopResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
    private static final NoopResponse defaultInstance;
    public static NoopResponse getDefaultInstance() {
      return defaultInstance;
    }
-
+
    public NoopResponse getDefaultInstanceForType() {
      return defaultInstance;
    }
-
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private NoopResponse(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_NoopResponse_descriptor;
    }
-
+
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_NoopResponse_fieldAccessorTable;
+      return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_NoopResponse_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<NoopResponse> PARSER =
+        new com.google.protobuf.AbstractParser<NoopResponse>() {
+      public NoopResponse parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new NoopResponse(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<NoopResponse> getParserForType() {
+      return PARSER;
    }
-
+
    private void initFields() {
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
-
+
      memoizedIsInitialized = 1;
      return true;
    }
-
+
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }
-
+
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
-
+
      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
-
+
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
-
+
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
@@ -3352,101 +4019,95 @@ public final class PingProtos {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse) obj;
-
+
      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
-
+
+    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
      return hash;
    }
-
+
    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-          .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-          .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-          .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-          .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
    }
-
+
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
-
+
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
+    /**
+     * Protobuf type {@code NoopResponse}
+     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponseOrBuilder {
@@ -3454,18 +4115,21 @@ public final class PingProtos {
          getDescriptor() {
        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_NoopResponse_descriptor;
      }
-
+
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_NoopResponse_fieldAccessorTable;
+        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_NoopResponse_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse.Builder.class);
      }
-
+
      // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
-
-      private Builder(BuilderParent parent) {
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
@@ -3476,25 +4140,25 @@ public final class PingProtos {
      private static Builder create() {
        return new Builder();
      }
-
+
      public Builder clear() {
        super.clear();
        return this;
      }
-
+
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
-
+
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
-        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse.getDescriptor();
+        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_NoopResponse_descriptor;
      }
-
+
      public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse.getDefaultInstance();
      }
-
+
      public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse build() {
        org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse result = buildPartial();
        if (!result.isInitialized()) {
@@ -3502,23 +4166,13 @@
        }
        return result;
      }
-
-      private org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse buildParsed()
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(
-            result).asInvalidProtocolBufferException();
-        }
-        return result;
-      }
-
+
      public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse buildPartial() {
        org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse(this);
        onBuilt();
        return result;
      }
-
+
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse) {
          return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse)other);
@@ -3527,88 +4181,96 @@
          return this;
        }
      }
-
+
      public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse other) {
        if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
-
+
      public final boolean isInitialized() {
        return true;
      }
-
+
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
-        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder(
-            this.getUnknownFields());
-        while (true) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              this.setUnknownFields(unknownFields.build());
-              onChanged();
-              return this;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                this.setUnknownFields(unknownFields.build());
-                onChanged();
-                return this;
-              }
-              break;
-            }
+        org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
(com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - - + // @@protoc_insertion_point(builder_scope:NoopResponse) } - + static { defaultInstance = new NoopResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:NoopResponse) } - + + /** + * Protobuf service {@code PingService} + */ public static abstract class PingService implements com.google.protobuf.Service { protected PingService() {} - + public interface Interface { + /** + * rpc ping(.PingRequest) returns (.PingResponse); + */ public abstract void ping( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc count(.CountRequest) returns (.CountResponse); + */ public abstract void count( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc increment(.IncrementCountRequest) returns (.IncrementCountResponse); + */ public abstract void increment( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc hello(.HelloRequest) returns (.HelloResponse); + */ public abstract void hello( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc noop(.NoopRequest) returns (.NoopResponse); + */ public abstract void noop( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest request, com.google.protobuf.RpcCallback done); - + } - + public static com.google.protobuf.Service newReflectiveService( final Interface impl) { return new PingService() { @@ -3619,7 +4281,7 @@ public final class PingProtos { com.google.protobuf.RpcCallback done) { impl.ping(controller, request, done); } - + @java.lang.Override public void count( com.google.protobuf.RpcController controller, @@ -3627,7 +4289,7 @@ public final class PingProtos { com.google.protobuf.RpcCallback done) { impl.count(controller, request, done); } - + @java.lang.Override public void increment( com.google.protobuf.RpcController controller, @@ -3635,7 +4297,7 @@ public final class PingProtos { com.google.protobuf.RpcCallback done) { impl.increment(controller, request, done); } - + @java.lang.Override public void hello( com.google.protobuf.RpcController controller, @@ -3643,7 +4305,7 @@ public final class PingProtos { com.google.protobuf.RpcCallback done) { impl.hello(controller, request, done); } - + @java.lang.Override public void noop( com.google.protobuf.RpcController controller, @@ -3651,10 +4313,10 @@ public final class PingProtos { com.google.protobuf.RpcCallback done) { impl.noop(controller, request, done); } - + }; } - + public static com.google.protobuf.BlockingService newReflectiveBlockingService(final BlockingInterface impl) { return new com.google.protobuf.BlockingService() { @@ -3662,7 +4324,7 @@ public final class PingProtos { getDescriptorForType() { return getDescriptor(); } - + public final 
com.google.protobuf.Message callBlockingMethod( com.google.protobuf.Descriptors.MethodDescriptor method, com.google.protobuf.RpcController controller, @@ -3688,7 +4350,7 @@ public final class PingProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getRequestPrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -3712,7 +4374,7 @@ public final class PingProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getResponsePrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -3736,35 +4398,50 @@ public final class PingProtos { throw new java.lang.AssertionError("Can't get here."); } } - + }; } - + + /** + * rpc ping(.PingRequest) returns (.PingResponse); + */ public abstract void ping( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc count(.CountRequest) returns (.CountResponse); + */ public abstract void count( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc increment(.IncrementCountRequest) returns (.IncrementCountResponse); + */ public abstract void increment( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc hello(.HelloRequest) returns (.HelloResponse); + */ public abstract void hello( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest request, com.google.protobuf.RpcCallback done); - + + /** + * rpc noop(.NoopRequest) returns (.NoopResponse); + */ public abstract void noop( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest request, com.google.protobuf.RpcCallback done); - + public static final com.google.protobuf.Descriptors.ServiceDescriptor getDescriptor() { @@ -3774,7 +4451,7 @@ public final class PingProtos { getDescriptorForType() { return getDescriptor(); } - + public final void callMethod( com.google.protobuf.Descriptors.MethodDescriptor method, com.google.protobuf.RpcController controller, @@ -3816,7 +4493,7 @@ public final class PingProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getRequestPrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -3840,7 +4517,7 @@ public final class PingProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getResponsePrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -3864,23 +4541,23 @@ public final class PingProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public static Stub newStub( com.google.protobuf.RpcChannel channel) { return new Stub(channel); } - + public static final class Stub extends org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingService implements Interface { private Stub(com.google.protobuf.RpcChannel channel) { this.channel = channel; } - + private final com.google.protobuf.RpcChannel channel; - + public com.google.protobuf.RpcChannel getChannel() { return channel; } - + public void ping( 
com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest request, @@ -3895,7 +4572,7 @@ public final class PingProtos { org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse.getDefaultInstance())); } - + public void count( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest request, @@ -3910,7 +4587,7 @@ public final class PingProtos { org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse.getDefaultInstance())); } - + public void increment( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest request, @@ -3925,7 +4602,7 @@ public final class PingProtos { org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse.getDefaultInstance())); } - + public void hello( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest request, @@ -3940,7 +4617,7 @@ public final class PingProtos { org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse.getDefaultInstance())); } - + public void noop( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest request, @@ -3956,46 +4633,46 @@ public final class PingProtos { org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse.getDefaultInstance())); } } - + public static BlockingInterface newBlockingStub( com.google.protobuf.BlockingRpcChannel channel) { return new BlockingStub(channel); } - + public interface BlockingInterface { public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse ping( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse count( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse increment( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse hello( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse noop( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest request) throws com.google.protobuf.ServiceException; } - + private static final class 
BlockingStub implements BlockingInterface { private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { this.channel = channel; } - + private final com.google.protobuf.BlockingRpcChannel channel; - + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse ping( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest request) @@ -4006,8 +4683,8 @@ public final class PingProtos { request, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse count( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest request) @@ -4018,8 +4695,8 @@ public final class PingProtos { request, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse increment( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest request) @@ -4030,8 +4707,8 @@ public final class PingProtos { request, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse hello( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest request) @@ -4042,8 +4719,8 @@ public final class PingProtos { request, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse noop( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest request) @@ -4054,10 +4731,12 @@ public final class PingProtos { request, org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse.getDefaultInstance()); } - + } + + // @@protoc_insertion_point(class_scope:PingService) } - + private static com.google.protobuf.Descriptors.Descriptor internal_static_PingRequest_descriptor; private static @@ -4108,7 +4787,7 @@ public final class PingProtos { private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_NoopResponse_fieldAccessorTable; - + public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; @@ -4143,81 +4822,61 @@ public final class PingProtos { internal_static_PingRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_PingRequest_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest.class, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest.Builder.class); + new java.lang.String[] { }); internal_static_PingResponse_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_PingResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_PingResponse_descriptor, - new java.lang.String[] { "Pong", }, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse.class, - 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse.Builder.class); + new java.lang.String[] { "Pong", }); internal_static_CountRequest_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_CountRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_CountRequest_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest.class, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest.Builder.class); + new java.lang.String[] { }); internal_static_CountResponse_descriptor = getDescriptor().getMessageTypes().get(3); internal_static_CountResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_CountResponse_descriptor, - new java.lang.String[] { "Count", }, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse.class, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse.Builder.class); + new java.lang.String[] { "Count", }); internal_static_IncrementCountRequest_descriptor = getDescriptor().getMessageTypes().get(4); internal_static_IncrementCountRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_IncrementCountRequest_descriptor, - new java.lang.String[] { "Diff", }, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest.class, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest.Builder.class); + new java.lang.String[] { "Diff", }); internal_static_IncrementCountResponse_descriptor = getDescriptor().getMessageTypes().get(5); internal_static_IncrementCountResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_IncrementCountResponse_descriptor, - new java.lang.String[] { "Count", }, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse.class, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse.Builder.class); + new java.lang.String[] { "Count", }); internal_static_HelloRequest_descriptor = getDescriptor().getMessageTypes().get(6); internal_static_HelloRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_HelloRequest_descriptor, - new java.lang.String[] { "Name", }, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest.class, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest.Builder.class); + new java.lang.String[] { "Name", }); internal_static_HelloResponse_descriptor = getDescriptor().getMessageTypes().get(7); internal_static_HelloResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_HelloResponse_descriptor, - new java.lang.String[] { "Response", }, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse.class, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse.Builder.class); + new java.lang.String[] { "Response", }); internal_static_NoopRequest_descriptor = getDescriptor().getMessageTypes().get(8); internal_static_NoopRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_NoopRequest_descriptor, - new java.lang.String[] { }, - 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest.class, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest.Builder.class); + new java.lang.String[] { }); internal_static_NoopResponse_descriptor = getDescriptor().getMessageTypes().get(9); internal_static_NoopResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_NoopResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse.class, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse.Builder.class); + new java.lang.String[] { }); return null; } }; @@ -4226,6 +4885,6 @@ public final class PingProtos { new com.google.protobuf.Descriptors.FileDescriptor[] { }, assigner); } - + // @@protoc_insertion_point(outer_class_scope) } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/protobuf/generated/TestDelayedRpcProtos.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/protobuf/generated/TestDelayedRpcProtos.java index 22bed84..53b8ca2 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/protobuf/generated/TestDelayedRpcProtos.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/protobuf/generated/TestDelayedRpcProtos.java @@ -10,50 +10,129 @@ public final class TestDelayedRpcProtos { } public interface TestArgOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required bool delay = 1; + /** + * required bool delay = 1; + */ boolean hasDelay(); + /** + * required bool delay = 1; + */ boolean getDelay(); } + /** + * Protobuf type {@code TestArg} + */ public static final class TestArg extends com.google.protobuf.GeneratedMessage implements TestArgOrBuilder { // Use TestArg.newBuilder() to construct. 
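[Note on the hunks above] Two regenerated patterns repeat across every message: each static parseFrom/parseDelimitedFrom overload now delegates to the shared PARSER instead of allocating a Builder and finishing through the now-deleted private buildParsed(), and each FieldAccessorTable is registered with field names only, with the message and Builder classes bound lazily via ensureFieldAccessorsInitialized() at first access. Callers see identical public signatures. A minimal caller-side sketch, assuming the regenerated TestDelayedRpcProtos from this patch is on the classpath (the class name ParserMigrationSketch is illustrative, not part of the patch):

    import org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg;

    public class ParserMigrationSketch {
      public static void main(String[] args) throws Exception {
        byte[] bytes = TestArg.newBuilder().setDelay(true).build().toByteArray();

        // Old 2.4.x shape, deleted by this patch: every static parseFrom
        // overload allocated a Builder and finished through buildParsed():
        //   TestArg arg = TestArg.newBuilder().mergeFrom(bytes).buildParsed();
        // (buildParsed() was private, so that line only existed inside the
        // generated class itself.)

        // New 2.5.x shape: the same public entry point, now backed by the
        // shared stateless PARSER, with no intermediate Builder allocation.
        TestArg arg = TestArg.parseFrom(bytes);
        System.out.println(arg.getDelay()); // true
      }
    }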
- private TestArg(Builder builder) { + private TestArg(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private TestArg(boolean noInit) {} - + private TestArg(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final TestArg defaultInstance; public static TestArg getDefaultInstance() { return defaultInstance; } - + public TestArg getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private TestArg( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + delay_ = input.readBool(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.internal_static_TestArg_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.internal_static_TestArg_fieldAccessorTable; + return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.internal_static_TestArg_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg.class, org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public TestArg parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new TestArg(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required bool delay = 1; public static final int DELAY_FIELD_NUMBER = 1; private boolean delay_; + /** + * required bool delay = 1; + */ public boolean hasDelay() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bool delay = 1; + */ public boolean getDelay() { return delay_; } - + private void initFields() { delay_ = false; } @@ -61,7 +140,7 @@ public final class TestDelayedRpcProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return 
isInitialized == 1; - + if (!hasDelay()) { memoizedIsInitialized = 0; return false; @@ -69,7 +148,7 @@ public final class TestDelayedRpcProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -78,12 +157,12 @@ public final class TestDelayedRpcProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -93,14 +172,14 @@ public final class TestDelayedRpcProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -110,7 +189,7 @@ public final class TestDelayedRpcProtos { return super.equals(obj); } org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg other = (org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg) obj; - + boolean result = true; result = result && (hasDelay() == other.hasDelay()); if (hasDelay()) { @@ -121,9 +200,13 @@ public final class TestDelayedRpcProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasDelay()) { @@ -131,89 +214,79 @@ public final class TestDelayedRpcProtos { hash = (53 * hash) + hashBoolean(getDelay()); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static 
org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code TestArg} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArgOrBuilder { @@ -221,18 +294,21 @@ public final class TestDelayedRpcProtos { getDescriptor() { return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.internal_static_TestArg_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.internal_static_TestArg_fieldAccessorTable; + return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.internal_static_TestArg_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg.class, org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg.Builder.class); } - + // Construct using org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + 
com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -243,27 +319,27 @@ public final class TestDelayedRpcProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); delay_ = false; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg.getDescriptor(); + return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.internal_static_TestArg_descriptor; } - + public org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg getDefaultInstanceForType() { return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg.getDefaultInstance(); } - + public org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg build() { org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg result = buildPartial(); if (!result.isInitialized()) { @@ -271,17 +347,7 @@ public final class TestDelayedRpcProtos { } return result; } - - private org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg buildPartial() { org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg result = new org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg(this); int from_bitField0_ = bitField0_; @@ -294,7 +360,7 @@ public final class TestDelayedRpcProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg) { return mergeFrom((org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg)other); @@ -303,7 +369,7 @@ public final class TestDelayedRpcProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg other) { if (other == org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg.getDefaultInstance()) return this; if (other.hasDelay()) { @@ -312,7 +378,7 @@ public final class TestDelayedRpcProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasDelay()) { @@ -320,119 +386,195 @@ public final class TestDelayedRpcProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - 
onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - delay_ = input.readBool(); - break; - } + org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required bool delay = 1; private boolean delay_ ; + /** + * required bool delay = 1; + */ public boolean hasDelay() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required bool delay = 1; + */ public boolean getDelay() { return delay_; } + /** + * required bool delay = 1; + */ public Builder setDelay(boolean value) { bitField0_ |= 0x00000001; delay_ = value; onChanged(); return this; } + /** + * required bool delay = 1; + */ public Builder clearDelay() { bitField0_ = (bitField0_ & ~0x00000001); delay_ = false; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:TestArg) } - + static { defaultInstance = new TestArg(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:TestArg) } - + public interface TestResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required int32 response = 1; + /** + * required int32 response = 1; + */ boolean hasResponse(); + /** + * required int32 response = 1; + */ int getResponse(); } + /** + * Protobuf type {@code TestResponse} + */ public static final class TestResponse extends com.google.protobuf.GeneratedMessage implements TestResponseOrBuilder { // Use TestResponse.newBuilder() to construct. 
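[Note on the hunks above] The regenerated Builder.mergeFrom(CodedInputStream, ExtensionRegistryLite) no longer hand-rolls a tag-switch loop; it delegates to PARSER.parsePartialFrom and, if parsing fails, recovers whatever was decoded from InvalidProtocolBufferException.getUnfinishedMessage() and merges it before rethrowing. The patch also memoizes hashCode() behind the new memoizedHashCode field, which is safe because messages are immutable. A sketch of the exception contract this relies on, assuming protobuf 2.5 semantics where the parser attaches the partial message before throwing (PartialParseSketch is an illustrative name):

    import com.google.protobuf.InvalidProtocolBufferException;
    import org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg;

    public class PartialParseSketch {
      public static void main(String[] args) {
        try {
          // Empty input decodes cleanly but violates 'required bool delay = 1',
          // so (assuming 2.5 semantics) the parser throws only after attaching
          // the partially built message to the exception.
          TestArg.parseFrom(new byte[0]);
        } catch (InvalidProtocolBufferException e) {
          TestArg partial = (TestArg) e.getUnfinishedMessage();
          System.out.println("hasDelay after failed parse: " + partial.hasDelay()); // false
        }
      }
    }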
- private TestResponse(Builder builder) { + private TestResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private TestResponse(boolean noInit) {} - + private TestResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final TestResponse defaultInstance; public static TestResponse getDefaultInstance() { return defaultInstance; } - + public TestResponse getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private TestResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + response_ = input.readInt32(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.internal_static_TestResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.internal_static_TestResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.internal_static_TestResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse.class, org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public TestResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new TestResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private int bitField0_; // required int32 response = 1; public static final int RESPONSE_FIELD_NUMBER = 1; private int response_; + /** + * required int32 response = 1; + */ public boolean hasResponse() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required int32 response = 1; + */ public int getResponse() { return response_; } - + private void initFields() { response_ = 0; } @@ -440,7 +582,7 @@ public final class TestDelayedRpcProtos { public final boolean 
isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasResponse()) { memoizedIsInitialized = 0; return false; @@ -448,7 +590,7 @@ public final class TestDelayedRpcProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -457,12 +599,12 @@ public final class TestDelayedRpcProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -472,14 +614,14 @@ public final class TestDelayedRpcProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -489,7 +631,7 @@ public final class TestDelayedRpcProtos { return super.equals(obj); } org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse other = (org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse) obj; - + boolean result = true; result = result && (hasResponse() == other.hasResponse()); if (hasResponse()) { @@ -500,9 +642,13 @@ public final class TestDelayedRpcProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasResponse()) { @@ -510,89 +656,79 @@ public final class TestDelayedRpcProtos { hash = (53 * hash) + getResponse(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse parseFrom(java.io.InputStream input) throws 
java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code TestResponse} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponseOrBuilder { @@ -600,18 +736,21 @@ public final class TestDelayedRpcProtos { getDescriptor() { return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.internal_static_TestResponse_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.internal_static_TestResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.internal_static_TestResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse.class, org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse.Builder.class); } - + // Construct using 
org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -622,27 +761,27 @@ public final class TestDelayedRpcProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); response_ = 0; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse.getDescriptor(); + return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.internal_static_TestResponse_descriptor; } - + public org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse.getDefaultInstance(); } - + public org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse build() { org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse result = buildPartial(); if (!result.isInitialized()) { @@ -650,17 +789,7 @@ public final class TestDelayedRpcProtos { } return result; } - - private org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse buildPartial() { org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse result = new org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse(this); int from_bitField0_ = bitField0_; @@ -673,7 +802,7 @@ public final class TestDelayedRpcProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse) { return mergeFrom((org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse)other); @@ -682,7 +811,7 @@ public final class TestDelayedRpcProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse other) { if (other == org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse.getDefaultInstance()) return this; if (other.hasResponse()) { @@ -691,7 +820,7 @@ public final class TestDelayedRpcProtos { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasResponse()) { @@ -699,73 +828,70 @@ public final class TestDelayedRpcProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - 
while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - response_ = input.readInt32(); - break; - } + org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required int32 response = 1; private int response_ ; + /** + * required int32 response = 1; + */ public boolean hasResponse() { return ((bitField0_ & 0x00000001) == 0x00000001); } + /** + * required int32 response = 1; + */ public int getResponse() { return response_; } + /** + * required int32 response = 1; + */ public Builder setResponse(int value) { bitField0_ |= 0x00000001; response_ = value; onChanged(); return this; } + /** + * required int32 response = 1; + */ public Builder clearResponse() { bitField0_ = (bitField0_ & ~0x00000001); response_ = 0; onChanged(); return this; } - + // @@protoc_insertion_point(builder_scope:TestResponse) } - + static { defaultInstance = new TestResponse(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:TestResponse) } - + private static com.google.protobuf.Descriptors.Descriptor internal_static_TestArg_descriptor; private static @@ -776,7 +902,7 @@ public final class TestDelayedRpcProtos { private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_TestResponse_fieldAccessorTable; - + public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; @@ -801,17 +927,13 @@ public final class TestDelayedRpcProtos { internal_static_TestArg_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_TestArg_descriptor, - new java.lang.String[] { "Delay", }, - org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg.class, - org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg.Builder.class); + new java.lang.String[] { "Delay", }); internal_static_TestResponse_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_TestResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_TestResponse_descriptor, - new java.lang.String[] { "Response", }, - org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse.class, - org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse.Builder.class); + new java.lang.String[] { "Response", }); return null; } }; @@ -820,6 +942,6 @@ public final class TestDelayedRpcProtos { new com.google.protobuf.Descriptors.FileDescriptor[] { }, assigner); } - + // @@protoc_insertion_point(outer_class_scope) } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/protobuf/generated/TestProtos.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/protobuf/generated/TestProtos.java index 7267df8..3da0254 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/protobuf/generated/TestProtos.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/protobuf/generated/TestProtos.java @@ -11,69 +11,130 @@ public final class TestProtos { public interface EmptyRequestProtoOrBuilder extends com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code EmptyRequestProto} + */ public static final class EmptyRequestProto extends com.google.protobuf.GeneratedMessage implements EmptyRequestProtoOrBuilder { // Use EmptyRequestProto.newBuilder() to construct. - private EmptyRequestProto(Builder builder) { + private EmptyRequestProto(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private EmptyRequestProto(boolean noInit) {} - + private EmptyRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final EmptyRequestProto defaultInstance; public static EmptyRequestProto getDefaultInstance() { return defaultInstance; } - + public EmptyRequestProto getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private EmptyRequestProto( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.internal_static_EmptyRequestProto_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.internal_static_EmptyRequestProto_fieldAccessorTable; + return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.internal_static_EmptyRequestProto_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto.class, org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public EmptyRequestProto parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new EmptyRequestProto(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + 
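[Note on the hunks above] Each regenerated message now owns a public static PARSER, an anonymous AbstractParser whose parsePartialFrom simply invokes the new package-private constructor that reads the CodedInputStream directly (retaining unknown fields and making extensions immutable in its finally block), and overrides getParserForType() to expose it. That lets framework code decode bytes without round-tripping through getDefaultInstance().newBuilderForType(). A hedged sketch of that use; ReparseHelper is illustrative and not part of this patch:

    import com.google.protobuf.InvalidProtocolBufferException;
    import com.google.protobuf.Message;

    public final class ReparseHelper {
      private ReparseHelper() {}

      // Decode bytes as the same concrete type as 'prototype', via the Parser
      // the regenerated classes expose through getParserForType().
      @SuppressWarnings("unchecked")
      public static <M extends Message> M reparse(M prototype, byte[] bytes)
          throws InvalidProtocolBufferException {
        return (M) prototype.getParserForType().parseFrom(bytes);
      }
    }

For example, ReparseHelper.reparse(TestResponse.getDefaultInstance(), bytes) yields a TestResponse without naming the concrete parser at the call site.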
private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -83,101 +144,95 @@ public final class TestProtos { return super.equals(obj); } org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto other = (org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto) obj; - + boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static 
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code EmptyRequestProto} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProtoOrBuilder { @@ -185,18 +240,21 @@ public final class TestProtos { getDescriptor() { return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.internal_static_EmptyRequestProto_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.internal_static_EmptyRequestProto_fieldAccessorTable; + return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.internal_static_EmptyRequestProto_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto.class, org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto.Builder.class); } - + // Construct using org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -207,25 +265,25 @@ public final class TestProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public 
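[Editor's note] The `parseDelimitedFrom` hunk above drops the hand-rolled `mergeDelimitedFrom(...) ? buildParsed() : null` dance. Assuming protobuf-java 2.5.x semantics, `PARSER.parseDelimitedFrom` preserves the same contract: it returns `null` on a clean end-of-stream. A round-trip sketch using the patch's own message type:

```java
// Sketch: length-delimited streaming round trip; null signals end of stream
// under both the old and the regenerated implementation.
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto;

public class DelimitedStream {
  public static void main(String[] args) throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    EchoRequestProto.newBuilder().setMessage("a").build().writeDelimitedTo(out);
    EchoRequestProto.newBuilder().setMessage("b").build().writeDelimitedTo(out);

    InputStream in = new ByteArrayInputStream(out.toByteArray());
    EchoRequestProto m;
    while ((m = EchoRequestProto.parseDelimitedFrom(in)) != null) {
      System.out.println(m.getMessage()); // prints: a, then b
    }
  }
}
```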
com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto.getDescriptor(); + return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.internal_static_EmptyRequestProto_descriptor; } - + public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto getDefaultInstanceForType() { return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto.getDefaultInstance(); } - + public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto build() { org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto result = buildPartial(); if (!result.isInitialized()) { @@ -233,23 +291,13 @@ public final class TestProtos { } return result; } - - private org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto buildPartial() { org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto result = new org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto(this); onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto) { return mergeFrom((org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto)other); @@ -258,122 +306,173 @@ public final class TestProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto other) { if (other == org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } + org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - - + // @@protoc_insertion_point(builder_scope:EmptyRequestProto) } - + static { defaultInstance = new EmptyRequestProto(true); defaultInstance.initFields(); } - + // 
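[Editor's note] The rewritten `Builder.mergeFrom(CodedInputStream, ...)` above delegates to `PARSER.parsePartialFrom` and, when parsing fails, still merges whatever was decoded before rethrowing; the partial data rides along on `InvalidProtocolBufferException.getUnfinishedMessage()` (new in 2.5). A hedged caller-side sketch, where `corrupt` stands in for truncated or malformed wire bytes:

```java
// Sketch: recovering partial data after a failed parse, assuming
// protobuf-java 2.5.x exception semantics.
import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.MessageLite;
import org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto;

public class PartialParse {
  static EchoRequestProto tryParse(byte[] corrupt) {
    try {
      return EchoRequestProto.PARSER.parseFrom(corrupt);
    } catch (InvalidProtocolBufferException e) {
      MessageLite partial = e.getUnfinishedMessage(); // may be null
      System.err.println("parse failed; partial message = " + partial);
      return null;
    }
  }
}
```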
@@protoc_insertion_point(class_scope:EmptyRequestProto) } - + public interface EmptyResponseProtoOrBuilder extends com.google.protobuf.MessageOrBuilder { } + /** + * Protobuf type {@code EmptyResponseProto} + */ public static final class EmptyResponseProto extends com.google.protobuf.GeneratedMessage implements EmptyResponseProtoOrBuilder { // Use EmptyResponseProto.newBuilder() to construct. - private EmptyResponseProto(Builder builder) { + private EmptyResponseProto(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private EmptyResponseProto(boolean noInit) {} - + private EmptyResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final EmptyResponseProto defaultInstance; public static EmptyResponseProto getDefaultInstance() { return defaultInstance; } - + public EmptyResponseProto getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private EmptyResponseProto( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.internal_static_EmptyResponseProto_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.internal_static_EmptyResponseProto_fieldAccessorTable; + return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.internal_static_EmptyResponseProto_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto.class, org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public EmptyResponseProto parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new EmptyResponseProto(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - + private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if 
(isInitialized != -1) return isInitialized == 1; - + memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -383,101 +482,95 @@ public final class TestProtos { return super.equals(obj); } org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto other = (org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto) obj; - + boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = 
newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code EmptyResponseProto} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProtoOrBuilder { @@ -485,18 +578,21 @@ public final class TestProtos { getDescriptor() { return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.internal_static_EmptyResponseProto_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.internal_static_EmptyResponseProto_fieldAccessorTable; + return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.internal_static_EmptyResponseProto_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto.class, org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto.Builder.class); } - + // Construct using org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -507,25 +603,25 @@ public final class TestProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto.getDescriptor(); + return 
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.internal_static_EmptyResponseProto_descriptor; } - + public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto getDefaultInstanceForType() { return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto.getDefaultInstance(); } - + public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto build() { org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto result = buildPartial(); if (!result.isInitialized()) { @@ -533,23 +629,13 @@ public final class TestProtos { } return result; } - - private org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto buildPartial() { org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto result = new org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto(this); onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto) { return mergeFrom((org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto)other); @@ -558,124 +644,203 @@ public final class TestProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto other) { if (other == org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } + org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - - + // @@protoc_insertion_point(builder_scope:EmptyResponseProto) } - + static { defaultInstance = new EmptyResponseProto(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:EmptyResponseProto) } - + public interface EchoRequestProtoOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required string message = 1; + /** + * 
required string message = 1; + */ boolean hasMessage(); - String getMessage(); + /** + * required string message = 1; + */ + java.lang.String getMessage(); + /** + * required string message = 1; + */ + com.google.protobuf.ByteString + getMessageBytes(); } + /** + * Protobuf type {@code EchoRequestProto} + */ public static final class EchoRequestProto extends com.google.protobuf.GeneratedMessage implements EchoRequestProtoOrBuilder { // Use EchoRequestProto.newBuilder() to construct. - private EchoRequestProto(Builder builder) { + private EchoRequestProto(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private EchoRequestProto(boolean noInit) {} - + private EchoRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final EchoRequestProto defaultInstance; public static EchoRequestProto getDefaultInstance() { return defaultInstance; } - + public EchoRequestProto getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private EchoRequestProto( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + message_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.internal_static_EchoRequestProto_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.internal_static_EchoRequestProto_fieldAccessorTable; + return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.internal_static_EchoRequestProto_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto.class, org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public EchoRequestProto parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new EchoRequestProto(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return 
PARSER; } - + private int bitField0_; // required string message = 1; public static final int MESSAGE_FIELD_NUMBER = 1; private java.lang.Object message_; + /** + * required string message = 1; + */ public boolean hasMessage() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getMessage() { + /** + * required string message = 1; + */ + public java.lang.String getMessage() { java.lang.Object ref = message_; - if (ref instanceof String) { - return (String) ref; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { message_ = s; } return s; } } - private com.google.protobuf.ByteString getMessageBytes() { + /** + * required string message = 1; + */ + public com.google.protobuf.ByteString + getMessageBytes() { java.lang.Object ref = message_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); message_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + private void initFields() { message_ = ""; } @@ -683,7 +848,7 @@ public final class TestProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasMessage()) { memoizedIsInitialized = 0; return false; @@ -691,7 +856,7 @@ public final class TestProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -700,12 +865,12 @@ public final class TestProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -715,14 +880,14 @@ public final class TestProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -732,7 +897,7 @@ public final class TestProtos { return super.equals(obj); } org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto other = (org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto) obj; - + boolean result = true; result = result && (hasMessage() == other.hasMessage()); if (hasMessage()) { @@ -743,9 +908,13 @@ public final class TestProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasMessage()) { @@ -753,89 +922,79 @@ public final class TestProtos { hash = (53 * hash) + getMessage().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static 
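[Editor's note] The `getMessage()`/`getMessageBytes()` accessors above show the lazy string representation the generated code relies on: the field slot is a plain `Object` holding either the raw `ByteString` from the wire or, once decoded, the cached `String`. A generic sketch of the idiom (mirrors the pattern, not the exact generated class):

```java
// Sketch of the lazy String <-> ByteString field representation.
import com.google.protobuf.ByteString;

public final class LazyStringField {
  // Holds ByteString (as parsed off the wire) or String (once decoded).
  private Object message;

  LazyStringField(ByteString wire) { this.message = wire; }

  public String getMessage() {
    Object ref = message;
    if (ref instanceof String) {
      return (String) ref;
    }
    ByteString bs = (ByteString) ref;
    String s = bs.toStringUtf8();
    if (bs.isValidUtf8()) {
      message = s; // cache the decoded form only when it is valid UTF-8
    }
    return s;
  }

  public ByteString getMessageBytes() {
    Object ref = message;
    if (ref instanceof String) {
      ByteString b = ByteString.copyFromUtf8((String) ref);
      message = b; // cache the encoded form
      return b;
    }
    return (ByteString) ref;
  }
}
```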
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto prototype) { return 
newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code EchoRequestProto} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProtoOrBuilder { @@ -843,18 +1002,21 @@ public final class TestProtos { getDescriptor() { return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.internal_static_EchoRequestProto_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.internal_static_EchoRequestProto_fieldAccessorTable; + return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.internal_static_EchoRequestProto_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto.class, org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto.Builder.class); } - + // Construct using org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -865,27 +1027,27 @@ public final class TestProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); message_ = ""; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto.getDescriptor(); + return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.internal_static_EchoRequestProto_descriptor; } - + public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto getDefaultInstanceForType() { return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto.getDefaultInstance(); } - + public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto build() { org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto result = buildPartial(); if (!result.isInitialized()) { @@ -893,17 +1055,7 @@ public final class TestProtos { } return result; } - - private org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto buildPartial() { org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto result = new org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto(this); int from_bitField0_ = bitField0_; @@ -916,7 +1068,7 @@ public final class TestProtos { onBuilt(); return result; } - + public Builder 
mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto) { return mergeFrom((org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto)other); @@ -925,16 +1077,18 @@ public final class TestProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto other) { if (other == org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto.getDefaultInstance()) return this; if (other.hasMessage()) { - setMessage(other.getMessage()); + bitField0_ |= 0x00000001; + message_ = other.message_; + onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasMessage()) { @@ -942,57 +1096,69 @@ public final class TestProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - message_ = input.readBytes(); - break; - } + org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required string message = 1; private java.lang.Object message_ = ""; + /** + * required string message = 1; + */ public boolean hasMessage() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getMessage() { + /** + * required string message = 1; + */ + public java.lang.String getMessage() { java.lang.Object ref = message_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); message_ = s; return s; } else { - return (String) ref; + return (java.lang.String) ref; } } - public Builder setMessage(String value) { + /** + * required string message = 1; + */ + public com.google.protobuf.ByteString + getMessageBytes() { + java.lang.Object ref = message_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + message_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * required string message = 1; + */ + public Builder setMessage( + java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ -1001,97 +1167,197 @@ public final class TestProtos { onChanged(); return this; } + /** + * required string message = 1; + */ public Builder clearMessage() { bitField0_ = (bitField0_ & 
~0x00000001); message_ = getDefaultInstance().getMessage(); onChanged(); return this; } - void setMessage(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; + /** + * required string message = 1; + */ + public Builder setMessageBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; message_ = value; onChanged(); + return this; } - + // @@protoc_insertion_point(builder_scope:EchoRequestProto) } - + static { defaultInstance = new EchoRequestProto(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:EchoRequestProto) } - + public interface EchoResponseProtoOrBuilder extends com.google.protobuf.MessageOrBuilder { - + // required string message = 1; + /** + * required string message = 1; + */ boolean hasMessage(); - String getMessage(); + /** + * required string message = 1; + */ + java.lang.String getMessage(); + /** + * required string message = 1; + */ + com.google.protobuf.ByteString + getMessageBytes(); } + /** + * Protobuf type {@code EchoResponseProto} + */ public static final class EchoResponseProto extends com.google.protobuf.GeneratedMessage implements EchoResponseProtoOrBuilder { // Use EchoResponseProto.newBuilder() to construct. - private EchoResponseProto(Builder builder) { + private EchoResponseProto(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); + this.unknownFields = builder.getUnknownFields(); } - private EchoResponseProto(boolean noInit) {} - + private EchoResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final EchoResponseProto defaultInstance; public static EchoResponseProto getDefaultInstance() { return defaultInstance; } - + public EchoResponseProto getDefaultInstanceForType() { return defaultInstance; } - + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private EchoResponseProto( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + message_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.internal_static_EchoResponseProto_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return 
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.internal_static_EchoResponseProto_fieldAccessorTable; + return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.internal_static_EchoResponseProto_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto.class, org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto.Builder.class); } - + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public EchoResponseProto parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new EchoResponseProto(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + private int bitField0_; // required string message = 1; public static final int MESSAGE_FIELD_NUMBER = 1; private java.lang.Object message_; + /** + * required string message = 1; + */ public boolean hasMessage() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getMessage() { + /** + * required string message = 1; + */ + public java.lang.String getMessage() { java.lang.Object ref = message_; - if (ref instanceof String) { - return (String) ref; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { message_ = s; } return s; } } - private com.google.protobuf.ByteString getMessageBytes() { + /** + * required string message = 1; + */ + public com.google.protobuf.ByteString + getMessageBytes() { java.lang.Object ref = message_; - if (ref instanceof String) { + if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); message_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - + private void initFields() { message_ = ""; } @@ -1099,7 +1365,7 @@ public final class TestProtos { public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - + if (!hasMessage()) { memoizedIsInitialized = 0; return false; @@ -1107,7 +1373,7 @@ public final class TestProtos { memoizedIsInitialized = 1; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -1116,12 +1382,12 @@ public final class TestProtos { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream @@ -1131,14 +1397,14 @@ public final class TestProtos { memoizedSerializedSize = size; return size; } - + private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } - + @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { @@ -1148,7 +1414,7 @@ public final 
class TestProtos { return super.equals(obj); } org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto other = (org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto) obj; - + boolean result = true; result = result && (hasMessage() == other.hasMessage()); if (hasMessage()) { @@ -1159,9 +1425,13 @@ public final class TestProtos { getUnknownFields().equals(other.getUnknownFields()); return result; } - + + private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasMessage()) { @@ -1169,89 +1439,79 @@ public final class TestProtos { hash = (53 * hash) + getMessage().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; return hash; } - + public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseDelimitedFrom(input, 
extensionRegistry); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); + return PARSER.parseFrom(input, extensionRegistry); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** + * Protobuf type {@code EchoResponseProto} + */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProtoOrBuilder { @@ -1259,18 +1519,21 @@ public final class TestProtos { getDescriptor() { return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.internal_static_EchoResponseProto_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.internal_static_EchoResponseProto_fieldAccessorTable; + return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.internal_static_EchoResponseProto_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto.class, org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto.Builder.class); } - + // Construct using org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } - - private Builder(BuilderParent parent) { + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } @@ -1281,27 +1544,27 @@ public final class TestProtos { private static Builder create() { return new Builder(); } - + public Builder clear() { super.clear(); message_ = ""; bitField0_ = (bitField0_ & ~0x00000001); return this; } - + public Builder clone() { return create().mergeFrom(buildPartial()); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto.getDescriptor(); + return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.internal_static_EchoResponseProto_descriptor; } - + public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto getDefaultInstanceForType() { return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto.getDefaultInstance(); } - + public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto build() { org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto result = buildPartial(); if 
(!result.isInitialized()) { @@ -1309,17 +1572,7 @@ public final class TestProtos { } return result; } - - private org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - + public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto buildPartial() { org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto result = new org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto(this); int from_bitField0_ = bitField0_; @@ -1332,7 +1585,7 @@ public final class TestProtos { onBuilt(); return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto) { return mergeFrom((org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto)other); @@ -1341,16 +1594,18 @@ public final class TestProtos { return this; } } - + public Builder mergeFrom(org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto other) { if (other == org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto.getDefaultInstance()) return this; if (other.hasMessage()) { - setMessage(other.getMessage()); + bitField0_ |= 0x00000001; + message_ = other.message_; + onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public final boolean isInitialized() { if (!hasMessage()) { @@ -1358,57 +1613,69 @@ public final class TestProtos { } return true; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - message_ = input.readBytes(); - break; - } + org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - private int bitField0_; - + // required string message = 1; private java.lang.Object message_ = ""; + /** + * required string message = 1; + */ public boolean hasMessage() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getMessage() { + /** + * required string message = 1; + */ + public java.lang.String getMessage() { java.lang.Object ref = message_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + if (!(ref instanceof java.lang.String)) { + 
java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); message_ = s; return s; } else { - return (String) ref; + return (java.lang.String) ref; + } + } + /** + * required string message = 1; + */ + public com.google.protobuf.ByteString + getMessageBytes() { + java.lang.Object ref = message_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + message_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; } } - public Builder setMessage(String value) { + /** + * required string message = 1; + */ + public Builder setMessage( + java.lang.String value) { if (value == null) { throw new NullPointerException(); } @@ -1417,29 +1684,40 @@ public final class TestProtos { onChanged(); return this; } + /** + * required string message = 1; + */ public Builder clearMessage() { bitField0_ = (bitField0_ & ~0x00000001); message_ = getDefaultInstance().getMessage(); onChanged(); return this; } - void setMessage(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; + /** + * required string message = 1; + */ + public Builder setMessageBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; message_ = value; onChanged(); + return this; } - + // @@protoc_insertion_point(builder_scope:EchoResponseProto) } - + static { defaultInstance = new EchoResponseProto(true); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:EchoResponseProto) } - + private static com.google.protobuf.Descriptors.Descriptor internal_static_EmptyRequestProto_descriptor; private static @@ -1460,7 +1738,7 @@ public final class TestProtos { private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_EchoResponseProto_fieldAccessorTable; - + public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; @@ -1485,33 +1763,25 @@ public final class TestProtos { internal_static_EmptyRequestProto_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_EmptyRequestProto_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto.class, - org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto.Builder.class); + new java.lang.String[] { }); internal_static_EmptyResponseProto_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_EmptyResponseProto_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_EmptyResponseProto_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto.class, - org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto.Builder.class); + new java.lang.String[] { }); internal_static_EchoRequestProto_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_EchoRequestProto_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_EchoRequestProto_descriptor, - new java.lang.String[] { "Message", }, - org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto.class, - org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto.Builder.class); + new java.lang.String[] { "Message", }); internal_static_EchoResponseProto_descriptor = getDescriptor().getMessageTypes().get(3); 
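[Editor's note] The descriptor-assigner hunks here drop the message/builder class arguments from the `FieldAccessorTable` constructor; with 2.5-style output each class instead calls `ensureFieldAccessorsInitialized(...)` on first use, deferring the reflective setup. From user code, reflective field access is unchanged; a sketch under that assumption:

```java
// Sketch: reflective access still routes through the FieldAccessorTable,
// which is now initialized lazily rather than at descriptor-assignment time.
import com.google.protobuf.Descriptors;
import org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto;

public class ReflectiveAccess {
  public static void main(String[] args) {
    EchoRequestProto msg =
        EchoRequestProto.newBuilder().setMessage("hi").build();
    Descriptors.FieldDescriptor field =
        EchoRequestProto.getDescriptor().findFieldByName("message");
    System.out.println(msg.getField(field)); // prints: hi
  }
}
```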
internal_static_EchoResponseProto_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_EchoResponseProto_descriptor, - new java.lang.String[] { "Message", }, - org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto.class, - org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto.Builder.class); + new java.lang.String[] { "Message", }); return null; } }; @@ -1520,6 +1790,6 @@ public final class TestProtos { new com.google.protobuf.Descriptors.FileDescriptor[] { }, assigner); } - + // @@protoc_insertion_point(outer_class_scope) } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/protobuf/generated/TestRpcServiceProtos.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/protobuf/generated/TestRpcServiceProtos.java index c2a5424..3fd34e9 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/protobuf/generated/TestRpcServiceProtos.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/protobuf/generated/TestRpcServiceProtos.java @@ -8,28 +8,45 @@ public final class TestRpcServiceProtos { public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { } + /** + * Protobuf service {@code TestProtobufRpcProto} + * + *
+   **
+   * A protobuf service for use in tests
+   * 
+ */ public static abstract class TestProtobufRpcProto implements com.google.protobuf.Service { protected TestProtobufRpcProto() {} - + public interface Interface { + /** + * rpc ping(.EmptyRequestProto) returns (.EmptyResponseProto); + */ public abstract void ping( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request, com.google.protobuf.RpcCallback done); - + + /** + * rpc echo(.EchoRequestProto) returns (.EchoResponseProto); + */ public abstract void echo( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto request, com.google.protobuf.RpcCallback done); - + + /** + * rpc error(.EmptyRequestProto) returns (.EmptyResponseProto); + */ public abstract void error( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request, com.google.protobuf.RpcCallback done); - + } - + public static com.google.protobuf.Service newReflectiveService( final Interface impl) { return new TestProtobufRpcProto() { @@ -40,7 +57,7 @@ public final class TestRpcServiceProtos { com.google.protobuf.RpcCallback done) { impl.ping(controller, request, done); } - + @java.lang.Override public void echo( com.google.protobuf.RpcController controller, @@ -48,7 +65,7 @@ public final class TestRpcServiceProtos { com.google.protobuf.RpcCallback done) { impl.echo(controller, request, done); } - + @java.lang.Override public void error( com.google.protobuf.RpcController controller, @@ -56,10 +73,10 @@ public final class TestRpcServiceProtos { com.google.protobuf.RpcCallback done) { impl.error(controller, request, done); } - + }; } - + public static com.google.protobuf.BlockingService newReflectiveBlockingService(final BlockingInterface impl) { return new com.google.protobuf.BlockingService() { @@ -67,7 +84,7 @@ public final class TestRpcServiceProtos { getDescriptorForType() { return getDescriptor(); } - + public final com.google.protobuf.Message callBlockingMethod( com.google.protobuf.Descriptors.MethodDescriptor method, com.google.protobuf.RpcController controller, @@ -89,7 +106,7 @@ public final class TestRpcServiceProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getRequestPrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -109,7 +126,7 @@ public final class TestRpcServiceProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getResponsePrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -129,25 +146,34 @@ public final class TestRpcServiceProtos { throw new java.lang.AssertionError("Can't get here."); } } - + }; } - + + /** + * rpc ping(.EmptyRequestProto) returns (.EmptyResponseProto); + */ public abstract void ping( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request, com.google.protobuf.RpcCallback done); - + + /** + * rpc echo(.EchoRequestProto) returns (.EchoResponseProto); + */ public abstract void echo( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto request, com.google.protobuf.RpcCallback done); - + + /** + * rpc error(.EmptyRequestProto) returns (.EmptyResponseProto); + */ public abstract void error( com.google.protobuf.RpcController controller, 
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request, com.google.protobuf.RpcCallback done); - + public static final com.google.protobuf.Descriptors.ServiceDescriptor getDescriptor() { @@ -157,7 +183,7 @@ public final class TestRpcServiceProtos { getDescriptorForType() { return getDescriptor(); } - + public final void callMethod( com.google.protobuf.Descriptors.MethodDescriptor method, com.google.protobuf.RpcController controller, @@ -189,7 +215,7 @@ public final class TestRpcServiceProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getRequestPrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -209,7 +235,7 @@ public final class TestRpcServiceProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public final com.google.protobuf.Message getResponsePrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { @@ -229,23 +255,23 @@ public final class TestRpcServiceProtos { throw new java.lang.AssertionError("Can't get here."); } } - + public static Stub newStub( com.google.protobuf.RpcChannel channel) { return new Stub(channel); } - + public static final class Stub extends org.apache.hadoop.hbase.ipc.protobuf.generated.TestRpcServiceProtos.TestProtobufRpcProto implements Interface { private Stub(com.google.protobuf.RpcChannel channel) { this.channel = channel; } - + private final com.google.protobuf.RpcChannel channel; - + public com.google.protobuf.RpcChannel getChannel() { return channel; } - + public void ping( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request, @@ -260,7 +286,7 @@ public final class TestRpcServiceProtos { org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto.class, org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto.getDefaultInstance())); } - + public void echo( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto request, @@ -275,7 +301,7 @@ public final class TestRpcServiceProtos { org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto.class, org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto.getDefaultInstance())); } - + public void error( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request, @@ -291,36 +317,36 @@ public final class TestRpcServiceProtos { org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto.getDefaultInstance())); } } - + public static BlockingInterface newBlockingStub( com.google.protobuf.BlockingRpcChannel channel) { return new BlockingStub(channel); } - + public interface BlockingInterface { public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto ping( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto echo( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto request) throws com.google.protobuf.ServiceException; - + public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto error( com.google.protobuf.RpcController controller, 
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request) throws com.google.protobuf.ServiceException; } - + private static final class BlockingStub implements BlockingInterface { private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { this.channel = channel; } - + private final com.google.protobuf.BlockingRpcChannel channel; - + public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto ping( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request) @@ -331,8 +357,8 @@ public final class TestRpcServiceProtos { request, org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto echo( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto request) @@ -343,8 +369,8 @@ public final class TestRpcServiceProtos { request, org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto.getDefaultInstance()); } - - + + public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto error( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request) @@ -355,11 +381,13 @@ public final class TestRpcServiceProtos { request, org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto.getDefaultInstance()); } - + } + + // @@protoc_insertion_point(class_scope:TestProtobufRpcProto) } - - + + public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; @@ -391,6 +419,6 @@ public final class TestRpcServiceProtos { org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.getDescriptor(), }, assigner); } - + // @@protoc_insertion_point(outer_class_scope) } diff --git a/hbase-server/src/test/protobuf/README.txt b/hbase-server/src/test/protobuf/README.txt new file mode 100644 index 0000000..45b8c05 --- /dev/null +++ b/hbase-server/src/test/protobuf/README.txt @@ -0,0 +1,27 @@ +These are the protobuf definition files used by tests. The produced Java +classes are generated into src/test/java/org/apache/hadoop/hbase/protobuf/generated +and then checked in. The reasoning is that they change infrequently. + +To regenerate the classes after making definition file changes, ensure first that +the protobuf protoc tool is in your $PATH (you may need to download it and build +it first; it's part of the protobuf package obtainable from here: +http://code.google.com/p/protobuf/downloads/list). Then run the following (you +should be able to just copy and paste the below into a terminal and hit return
-- the protoc compiler runs fast): + + UNIX_PROTO_DIR=src/test/protobuf + JAVA_DIR=src/test/java/ + mkdir -p $JAVA_DIR 2> /dev/null + if which cygpath > /dev/null 2>&1; then + PROTO_DIR=`cygpath --windows $UNIX_PROTO_DIR` + JAVA_DIR=`cygpath --windows $JAVA_DIR` + else + PROTO_DIR=$UNIX_PROTO_DIR + fi + for PROTO_FILE in $UNIX_PROTO_DIR/*.proto + do + protoc -I$PROTO_DIR --java_out=$JAVA_DIR $PROTO_FILE + done + +After you've done the above, check in the generated classes (or post a patch +on a JIRA with your definition file changes and the generated files). diff --git a/pom.xml b/pom.xml index 7a40c9f..89191c2 100644 --- a/pom.xml +++ b/pom.xml @@ -891,7 +891,7 @@ 1.4.3 1.2.17 1.9.0 - 2.4.1 + 2.5.0 1.0.1 0.9.0 3.4.5
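
For reviewers, the API shift this patch tracks, in one place: protobuf 2.5.0 generated
messages expose a static PARSER, so the builder round-trip used under 2.4.1
(newBuilder().mergeFrom(bytes).build()) becomes PARSER.parseFrom(bytes). The parser
reports a missing required field as the checked InvalidProtocolBufferException, where
build() would throw an unchecked UninitializedMessageException; 2.5.0-generated code
also parses straight into the message without an intermediate builder. A minimal sketch
of the two idioms against the regenerated test message above, assuming protobuf 2.5.0
and the regenerated classes on the classpath (the wrapper class name and the "hello"
payload are illustrative only, not part of the patch):

    import com.google.protobuf.InvalidProtocolBufferException;
    import org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto;

    // Minimal sketch; class name and payload are illustrative, not part of the patch.
    public class ParserIdiomSketch {
      public static void main(String[] args) throws InvalidProtocolBufferException {
        // Build and serialize a message so there is something to parse back.
        byte[] bytes =
            EchoResponseProto.newBuilder().setMessage("hello").build().toByteArray();

        // protobuf 2.4.x idiom being removed: merge into a fresh builder, then
        // build(); a missing required field would surface from build() as an
        // unchecked UninitializedMessageException.
        EchoResponseProto viaBuilder =
            EchoResponseProto.newBuilder().mergeFrom(bytes).build();

        // protobuf 2.5.0 idiom being adopted: the generated static PARSER parses
        // directly into the message and reports a missing required field as a
        // checked InvalidProtocolBufferException.
        EchoResponseProto viaParser = EchoResponseProto.PARSER.parseFrom(bytes);

        System.out.println(viaBuilder.getMessage().equals(viaParser.getMessage())); // true
      }
    }

On well-formed input the two idioms are interchangeable; they differ only in how parse
failures and missing required fields are reported.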